diff --git "a/issues-datasets.jsonl" "b/issues-datasets.jsonl" --- "a/issues-datasets.jsonl" +++ "b/issues-datasets.jsonl" @@ -1,1000 +1,3 @@ -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789","id":967361934,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA5NTQwMzY5","number":2789,"title":"Updated dataset description of DaNE","user":{"login":"KennethEnevoldsen","id":23721977,"node_id":"MDQ6VXNlcjIzNzIxOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23721977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KennethEnevoldsen","html_url":"https:\/\/github.com\/KennethEnevoldsen","followers_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/followers","following_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/repos","events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-11T19:58:48Z","updated_at":"2021-08-11T19:58:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2789","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789.patch"},"body":null,"performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2788","id":967149389,"node_id":"MDU6SXNzdWU5NjcxNDkzODk=","number":2788,"title":"How to sample every file in a list of files making up a split in a dataset when 
loading?","user":{"login":"brijow","id":11220949,"node_id":"MDQ6VXNlcjExMjIwOTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11220949?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/brijow","html_url":"https:\/\/github.com\/brijow","followers_url":"https:\/\/api.github.com\/users\/brijow\/followers","following_url":"https:\/\/api.github.com\/users\/brijow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/brijow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/brijow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/brijow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/brijow\/orgs","repos_url":"https:\/\/api.github.com\/users\/brijow\/repos","events_url":"https:\/\/api.github.com\/users\/brijow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/brijow\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-11T17:43:21Z","updated_at":"2021-08-11T17:43:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am loading a dataset with multiple train, test, and validation files like this:\r\n\r\n```\r\ndata_files_dict = {\r\n \"train\": [train_file1, train_file2],\r\n \"test\": [test_file1, test_file2],\r\n \"val\": [val_file1, val_file2]\r\n}\r\ndataset = datasets.load_dataset(\r\n \"csv\",\r\n data_files=data_files_dict,\r\n split=['train[:8]', 'test[:8]', 'val[:8]']\r\n)\r\n\r\n```\r\n\r\nHowever, this only selects the first 8 rows from train_file1, test_file1, val_file1, since they are the first files in the lists.\r\n\r\nI'm trying to formulate a split argument that can sample from each file specified in my list of files that make up each split.\r\n\r\nIs this type of splitting supported? 
If so, how can I do it?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2787","id":967018406,"node_id":"MDU6SXNzdWU5NjcwMTg0MDY=","number":2787,"title":"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com","user":{"login":"jinec","id":39627475,"node_id":"MDQ6VXNlcjM5NjI3NDc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39627475?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jinec","html_url":"https:\/\/github.com\/jinec","followers_url":"https:\/\/api.github.com\/users\/jinec\/followers","following_url":"https:\/\/api.github.com\/users\/jinec\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jinec\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jinec\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jinec\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jinec\/orgs","repos_url":"https:\/\/api.github.com\/users\/jinec\/repos","events_url":"https:\/\/api.github.com\/users\/jinec\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jinec\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-11T16:19:01Z","updated_at":"2021-08-11T17:09:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello,\r\nI am trying to run run_glue.py and it gives me this error -\r\n\r\nTraceback (most recent call last):\r\n File \"E:\/BERT\/pytorch_hugging\/transformers\/examples\/pytorch\/text-classification\/run_glue.py\", line 546, in \r\n main()\r\n File \"E:\/BERT\/pytorch_hugging\/transformers\/examples\/pytorch\/text-classification\/run_glue.py\", line 250, in main\r\n datasets = load_dataset(\"glue\", data_args.task_name, cache_dir=model_args.cache_dir)\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\load.py\", line 718, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\load.py\", line 320, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 623, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.7.0\/datasets\/glue\/glue.py\r\n\r\nTrying to do python run_glue.py 
--model_name_or_path\r\nbert-base-cased\r\n--task_name\r\nmrpc\r\n--do_train\r\n--do_eval\r\n--max_seq_length\r\n128\r\n--per_device_train_batch_size\r\n32\r\n--learning_rate\r\n2e-5\r\n--num_train_epochs\r\n3\r\n--output_dir\r\n.\/tmp\/mrpc\/\r\n\r\nIs this something on my end? From what I can tell, this was re-fixeded by @fullyz a few months ago.\r\nThank you!\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786","id":966282934,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA4NTQwMzU0","number":2786,"title":"Support streaming compressed files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-11T09:02:06Z","updated_at":"2021-08-11T13:11:36Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2786","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786.patch"},"body":"Add support to stream compressed files (current options in fsspec):\r\n- bz2\r\n- lz4\r\n- xz\r\n- zstd\r\n\r\ncc: @lewtun ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783","id":965461382,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3NzcxOTM3","number":2783,"title":"Add KS task to 
SUPERB","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-10T22:14:07Z","updated_at":"2021-08-11T20:19:17Z","closed_at":"2021-08-11T20:19:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2783","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783.patch"},"body":"Add the KS (keyword spotting) task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051).\r\n\r\n- [s3prl instructions](https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/README.md#ks-keyword-spotting)\r\n- [s3prl implementation](https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/speech_commands\/dataset.py)\r\n- [TFDS implementation](https:\/\/github.com\/tensorflow\/datasets\/blob\/master\/tensorflow_datasets\/audio\/speech_commands.py)\r\n\r\nSome notable quirks:\r\n- The dataset is originally single-archive (train+val+test all in one), but the test set has a \"canonical\" distribution in a separate archive, which is also used here (see `_split_ks_files()`). \r\n- The `_background_noise_`\/`_silence_` audio files are much longer than others, so they require some sort of slicing for downstream training. 
I decided to leave the implementation of that up to the users, since TFDS and s3prl take different approaches (either slicing wavs deterministically, or subsampling randomly at runtime)\r\n\r\nRelated to #2619.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782","id":964858439,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MjQ5NDE5","number":2782,"title":"Fix renaming of corpus_bleu args","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T11:02:34Z","updated_at":"2021-08-10T11:16:07Z","closed_at":"2021-08-10T11:16:07Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2782","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782.patch"},"body":"Last `sacrebleu` release (v2.0.0) has renamed `sacrebleu.corpus_bleu` args from `(sys_stream, ref_streams)` to `(hipotheses, references)`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR passes the args without parameter names, so that it is valid for all versions of `sacrebleu`.\r\n\r\nThis is a partial hotfix of #2781.\r\n\r\nClose #2781.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2781","id":964805351,"node_id":"MDU6SXNzdWU5NjQ4MDUzNTE=","number":2781,"title":"Latest v2.0.0 release of sacrebleu has broken some metrics","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-08-10T09:59:41Z","updated_at":"2021-08-10T11:16:07Z","closed_at":"2021-08-10T11:16:07Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nAfter `sacrebleu` v2.0.0 release (see changes here: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15), some of `datasets` metrics are broken:\r\n- Default tokenizer `sacrebleu.DEFAULT_TOKENIZER` no longer exists:\r\n - #2739\r\n - #2778\r\n- Bleu tokenizers are no longer accessible with `sacrebleu.TOKENIZERS`:\r\n - #2779\r\n- `corpus_bleu` args have been renamed from `(sys_stream, ref_streams)` to `(hipotheses, references)`: \r\n - #2782 ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780","id":964794764,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTk2NjA3","number":2780,"title":"VIVOS dataset for Vietnamese ASR","user":{"login":"binh234","id":57580923,"node_id":"MDQ6VXNlcjU3NTgwOTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57580923?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/binh234","html_url":"https:\/\/github.com\/binh234","followers_url":"https:\/\/api.github.com\/users\/binh234\/followers","following_url":"https:\/\/api.github.com\/users\/binh234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/binh234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/binh234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/binh234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/binh234\/orgs","repos_url":"https:\/\/api.github.com\/users\/binh234\/repos","events_url":"https:\/\/api.github.com\/users\/binh234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/binh234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T09:47:36Z","updated_at":"2021-08-11T14:09:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2780","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780.patch"},"body":null,"performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779","id":964775085,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTgwNTgw","number":2779,"title":"Fix sacrebleu 
tokenizers","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T09:24:27Z","updated_at":"2021-08-10T11:03:08Z","closed_at":"2021-08-10T10:57:54Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2779","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779.patch"},"body":"Last `sacrebleu` release (v2.0.0) has removed `sacrebleu.TOKENIZERS`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR makes a hot fix of the bug by using a private function in `sacrebleu`: `sacrebleu.metrics.bleu._get_tokenizer()`.\r\n\r\nEventually, this should be further fixed in order to use only public functions.\r\n\r\nThis is a partial hotfix of #2781.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778","id":964737422,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTQ5MTk2","number":2778,"title":"Do not pass tokenize to 
sacrebleu","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T08:40:37Z","updated_at":"2021-08-10T10:03:37Z","closed_at":"2021-08-10T10:03:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2778","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778.patch"},"body":"Last `sacrebleu` release (v2.0.0) has removed `sacrebleu.DEFAULT_TOKENIZER`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR does not pass `tokenize` to `sacrebleu` (note that the user cannot pass it anyway) and `sacrebleu` will use its default, no matter where it is and how it is called.\r\n\r\nRelated to #2739.\r\n\r\nThis is a partial hotfix of #2781.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777","id":964696380,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTEzNzg3","number":2777,"title":"Use packaging to handle 
versions","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T07:51:39Z","updated_at":"2021-08-10T11:32:51Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2777","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777.patch"},"body":"Use packaging module to handle\/validate\/check versions of Python packages.\r\n\r\nRelated to #2769.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2776","id":964400596,"node_id":"MDU6SXNzdWU5NjQ0MDA1OTY=","number":2776,"title":"document `config.HF_DATASETS_OFFLINE` and 
precedence","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T21:23:17Z","updated_at":"2021-08-09T21:23:17Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976 implemented `HF_DATASETS_OFFLINE`, but:\r\n1. `config.HF_DATASETS_OFFLINE` is not documented\r\n2. the precedence is not documented (env, config)\r\n\r\nI'm thinking it probably should be similar to what it says https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#from-the-huggingface-hub about `datasets.config.IN_MEMORY_MAX_SIZE`:\r\n\r\nQuote:\r\n> The default in \ud83e\udd17 Datasets is to memory-map the dataset on disk unless you set datasets.config.IN_MEMORY_MAX_SIZE different from 0 bytes (default). In that case, the dataset will be copied in-memory if its size is smaller than datasets.config.IN_MEMORY_MAX_SIZE bytes, and memory-mapped otherwise. 
This behavior can be enabled by setting either the configuration option datasets.config.IN_MEMORY_MAX_SIZE (higher precedence) or the environment variable HF_DATASETS_IN_MEMORY_MAX_SIZE (lower precedence) to nonzero.\r\n\r\nContext: trying to use `config.HF_DATASETS_OFFLINE` here:\r\nhttps:\/\/github.com\/bigscience-workshop\/Megatron-DeepSpeed\/pull\/48\r\nbut are uncertain if it's safe, since it's not documented as a public API.\r\n\r\nThank you!\r\n\r\n@lhoestq, @albertvillanova ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2775","id":964303626,"node_id":"MDU6SXNzdWU5NjQzMDM2MjY=","number":2775,"title":"`generate_random_fingerprint()` deterministic with \ud83e\udd17Transformers' `set_seed()`","user":{"login":"mbforbes","id":1170062,"node_id":"MDQ6VXNlcjExNzAwNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1170062?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mbforbes","html_url":"https:\/\/github.com\/mbforbes","followers_url":"https:\/\/api.github.com\/users\/mbforbes\/followers","following_url":"https:\/\/api.github.com\/users\/mbforbes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mbforbes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mbforbes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mbforbes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mbforbes\/orgs","repos_url":"https:\/\/api.github.com\/users\/mbforbes\/repos","events_url":"https:\/\/api.github.com\/users\/mbforbes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mbforbes\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-09T19:28:51Z","updated_at":"2021-08-10T17:33:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n**Update:** I dug into this to try to reproduce the underlying issue, and I believe it's that `set_seed()` from the `transformers` library makes the \"random\" fingerprint identical each time. I believe this is still a bug, because `datasets` is used exactly this way in `transformers` after `set_seed()` has been called, and I think that using `set_seed()` is a standard procedure to aid reproducibility. I've added more details to reproduce this below.\r\n\r\nHi there! I'm using my own local dataset and custom preprocessing function. My preprocessing function seems to be unpickle-able, perhaps because it is from a closure (will debug this separately). 
I get this warning, which is expected:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/450b9174765374111e5c6daab0ed294bc3d9b639\/src\/datasets\/fingerprint.py#L260-L265\r\n\r\nHowever, what's not expected is that the `datasets` actually _does_ seem to cache and reuse this dataset between runs! After that line, the next thing that's logged looks like:\r\n\r\n```text\r\n Loading cached processed dataset at \/home\/xxx\/.cache\/huggingface\/datasets\/csv\/default-xxx\/0.0.0\/xxx\/cache-xxx.arrow\r\n```\r\n\r\nThe path is exactly the same each run (e.g., last 26 runs).\r\n\r\nThis becomes a problem because I'll pass in the `--max_eval_samples` flag to the HuggingFace example script I'm running off of ([run_swag.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/pytorch\/multiple-choice\/run_swag.py)). The fact that the cached dataset is reused means this flag gets ignored. I'll try to load 100 examples, and it will load the full cached 1,000,000.\r\n\r\nI think that\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/450b9174765374111e5c6daab0ed294bc3d9b639\/src\/datasets\/fingerprint.py#L248\r\n\r\n... is actually consistent because randomness is being controlled in HuggingFace\/Transformers for reproducibility. I've added a demo of this below.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\n# Contents of print_fingerprint.py\r\nfrom transformers import set_seed\r\nfrom datasets.fingerprint import generate_random_fingerprint\r\nset_seed(42)\r\nprint(generate_random_fingerprint())\r\n```\r\n\r\n```bash\r\nfor i in {0..10}; do\r\n python print_fingerprint.py\r\ndone\r\n\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n```\r\n\r\n## Expected results\r\nAfter the \"random hash\" warning is emitted, a random hash is generated, and no outdated cached datasets are reused.\r\n\r\n## Actual results\r\nAfter the \"random hash\" warning is emitted, an identical hash is generated each time, and an outdated cached dataset is reused each run.\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.8.0-1038-gcp-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774","id":963932199,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA2NDY2MDc0","number":2774,"title":"Prevent .map from using multiprocessing when loading from 
cache","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T12:11:38Z","updated_at":"2021-08-11T09:12:01Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2774","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774.patch"},"body":"## Context\r\n\r\nOn our setup, we use different setup to train vs proprocessing datasets. Usually we are able to obtain a high number of cpus to preprocess, which allows us to use `num_proc` however we can't use as many during training phase. Currently if we use `num_proc={whatever the preprocessing value was}` we load from cache, but we get:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/pool.py\", line 131, in worker\r\n put((job, i, result))\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/queues.py\", line 371, in put\r\n self._writer.send_bytes(obj)\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 203, in send_bytes\r\n self._send_bytes(m[offset:offset + size])\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 414, in _send_bytes\r\n self._send(header + buf)\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 371, in _send\r\n n = write(self._handle, buf)\r\nBrokenPipeError: [Errno 32] Broken pipe\r\n```\r\n\r\nOur current guess, is that we're spawning too many workers compared to the umber of cpus available, and it's running OOM. (Currently we're testing if this fixes the issue) (**UPDATE**: sequentiality has fixed our issue)\r\n\r\nTherefore something that might be interesting, would be the ability to load a multiprocessed dataset sequentially? Or at least agnostic of the num_proc (as they are only used to dertermine the number of shards), and allow users to choose how to load the shards?\r\n\r\n## Current issues\r\n\r\n~I'm having a hard time making fingerprints match. 
For some reason, the multiprocessing and the sequential version generate two different hash.~\r\n\r\n**EDIT**: Turns out multiprocessing and sequential have different `transform` value for fingerprinting (check `fingerprint_transform`) when running `_map_single`:\r\n - sequential : `datasets.arrow_dataset.Dataset._map_single`\r\n - multiprocessing: `datasets.arrow_dataset._map_single`\r\n \r\n This discrepancy is caused by multiprocessing pickling the transformer function, it doesn't see to keep the `Dataset` hierarchy. I'm still unclear on why `func.__qual_name__` isn't handled correctly in multiprocessing. But replacing `__qualname__` by `__name__` fixes the issue.\r\n\r\n## What was done\r\n\r\n~We try to prevent the usage of multiprocessing when loading a dataset. Instead we load all cached shards sequentially.~\r\n\r\nI couldn't find a nice way to obtain the cached_file_name and check they all exist before deciding to use the multiprocessing flow or not. Instead I expose an optional boolean `sequential` in `map` method.\r\n\r\n## TODO\r\n - [x] Check that the multiprocessed version and the sequential version output the same output\r\n - [x] Check that sequential can load multiprocessed\r\n - [x] Check that multiprocessed can load sequential\r\n \r\n ## Test\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom multiprocessing import Pool\r\nimport random\r\n\r\ndef process(batch, rng):\r\n length = len(batch[\"text\"])\r\n return {**batch, \"processed_text\": [f\"PROCESSED {rng.random()}\" for _ in range(length)]}\r\n\r\ndataset = load_dataset(\"stas\/openwebtext-10k\", split=\"train\")\r\nprint(dataset.column_names)\r\nprint(type(dataset))\r\n\r\nrng = random.Random(42)\r\ndataset1 = dataset.map(process, batched=True, batch_size=50, num_proc=4, fn_kwargs={\"rng\": rng})\r\n\r\n# This one should be loaded from cache\r\nrng = random.Random(42)\r\ndataset2 = dataset.map(process, batched=True, batch_size=50, num_proc=4, fn_kwargs={\"rng\": rng}, sequential=True)\r\n\r\n# Just to check that the random generator was correct\r\nprint(dataset1[-1][\"processed_text\"])\r\nprint(dataset2[-1][\"processed_text\"])\r\n```\r\n \r\n ## Other solutions\r\n\r\nI chose to load everything sequentially, but we can probably find a way to load shards in parallel using another number of workers (essentially this would be an argument not used for fingerprinting, allowing to allow `m` shards using `n` processes, which would be very useful when same dataset have to be loaded on two different setup, and we still want to leverage cache).\r\n\r\ncc @lhoestq (since I had asked you previously on `num_proc` being used for fingerprinting). 
Don't know if this is acceptable.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2773","id":963730497,"node_id":"MDU6SXNzdWU5NjM3MzA0OTc=","number":2773,"title":"Remove dataset_infos.json","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T07:43:19Z","updated_at":"2021-08-09T07:43:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nAs discussed, there are infos in the `dataset_infos.json` which are redundant and we could have them only in the README file.\r\n\r\nOthers could be migrated to the README, like: \"dataset_size\", \"size_in_bytes\", \"download_size\", \"splits.split_name.[num_bytes, num_examples]\",...\r\n\r\nHowever, there are others that do not seem too meaningful in the README, like the checksums.\r\n\r\n**Describe the solution you'd like**\r\nOpen a discussion to decide what to do with the `dataset_infos.json` files: which information to be migrated and\/or which information to be kept.\r\n\r\ncc: @julien-c @lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2772","id":963348834,"node_id":"MDU6SXNzdWU5NjMzNDg4MzQ=","number":2772,"title":"Remove returned feature constrain","user":{"login":"PosoSAgapo","id":33200481,"node_id":"MDQ6VXNlcjMzMjAwNDgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33200481?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PosoSAgapo","html_url":"https:\/\/github.com\/PosoSAgapo","followers_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/followers","following_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/repos","events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-08T04:01:30Z","updated_at":"2021-08-08T08:48:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"In the current version, the returned value of the map function has to be list or ndarray. However, this makes it unsuitable for many tasks. In NLP, many features are sparse like verb words, noun chunks, if we want to assign different values to different words, which will result in a large sparse matrix if we only score useful words like verb words. \r\n\r\nMostly, when using it on large scale, saving it as a whole takes a lot of disk storage and making it hard to read, the normal method is saving it in sparse form. However, the NumPy does not support sparse, therefore I have to use PyTorch or scipy to transform a matrix into special sparse form, which is not a form that can be transformed into list or ndarry. 
This violates the feature constraints of the map function. \r\n\r\nI do appreciate the convenience of Datasets package, but I do not think the compulsory datatype constrain is necessary, in some cases, we just cannot transform it into a list or ndarray due to some reasons. Any way to fix this? Or what I can do to disable the compulsory datatype constrain?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771","id":963257036,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1OTExMDMw","number":2771,"title":"[WIP][Common Voice 7] Add common voice 7.0","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T16:01:10Z","updated_at":"2021-08-07T16:19:38Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2771","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771.patch"},"body":"This PR allows to load the new common voice dataset manually as explained when doing: \r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\".\/datasets\/datasets\/common_voice_7\", \"ab\")\r\n```\r\n\r\n=>\r\n\r\n```\r\n Please follow the manual download instructions:\r\n\r\n You need to manually the dataset from `https:\/\/commonvoice.mozilla.org\/en\/datasets`.\r\n Make sure you choose the version `Common Voice Corpus 7.0`.\r\n Choose a language of your choice and find the corresponding language-id, *e.g.*, `Abkhaz` with language-id `ab`. 
The following language-ids are available:\r\n\r\n ['ab', 'ar', 'as', 'az', 'ba', 'bas', 'be', 'bg', 'br', 'ca', 'cnh', 'cs', 'cv', 'cy', 'de', 'dv', 'el', 'en', 'eo', 'es', 'et', 'eu', 'fa', 'fi', 'fr', 'fy-NL', 'ga-IE', 'gl', 'gn', 'ha', 'hi', 'hsb', 'hu', 'hy-AM', 'ia', 'id', 'it', 'ja', 'ka', 'kab', 'kk', 'kmr', 'ky', 'lg', 'lt', 'lv', 'mn', 'mt', 'nl', 'or', 'pa-IN', 'pl', 'pt', 'rm-sursilv', 'rm-vallader', 'ro', 'ru', 'rw', 'sah', 'sk', 'sl', 'sr', 'sv-SE', 'ta', 'th', 'tr', 'tt', 'ug', 'uk', 'ur', 'uz', 'vi', 'vot', 'zh-CN', 'zh-HK', 'zh-TW']\r\n\r\n Next, you will have to enter your email address to download the dataset in the `tar.gz` format. Save the file under .\r\n The file should then be extracted with: ``tar -xvzf `` which will extract a folder called ``cv-corpus-7.0-2021-07-21``.\r\n The dataset can then be loaded with `datasets.load_dataset(\"common_voice\", , data_dir=\"\", ignore_verifications=True).\r\n```\r\n\r\nHaving followed those instructions one can then download the data as follows: \r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\".\/datasets\/datasets\/common_voice_7\", \"ab\", data_dir=\".\/cv-corpus-7.0-2021-07-21\/\", ignore_verifications=True)\r\n```\r\n\r\n## TODO\r\n- [ ] Discuss naming. Is the name ok here \"common_voice_7\"? The dataset script differs only really in one point from `common_voice.py` in that all the metadata is different (more hours etc...) and that it has to use manual data dir for now\r\n- [ ] Ideally we should get a bundled download link. For `common_voice.py` there is a bundled download link: `https:\/\/voice-prod-bundler-ee1969a6ce8178826482b88e843c335139bd3fb4.s3.amazonaws.com\/cv-corpus-6.1-2020-12-11\/{}.tar.gz` that allows one to directly download the data. However such a link is missing for Common Voice 7. I guess we should try to contact common voice about it and ask whether we could host the data or help otherwise somehow. See: https:\/\/github.com\/common-voice\/common-voice-bundler\/issues\/15 cc @yjernite \r\n- [ ] I did not compute the dataset.json and it would mean that I'd have to download 76 datasets totalling around 1TB manually before running the checksum command. This just takes too much time. For now the user will have to add a `ignore_verifications=True` to download the data. 
This step would also be much easier if we could get a bundled link\r\n- [ ] Add dummy data","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770","id":963246512,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1OTAzMzIy","number":2770,"title":"Add support for fast tokenizer in BertScore","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T15:00:03Z","updated_at":"2021-08-09T12:34:43Z","closed_at":"2021-08-09T11:16:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2770","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770.patch"},"body":"This PR adds support for a fast tokenizer in BertScore, which has been added recently to the lib.\r\nFixes #2765 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769","id":963240802,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1ODk5MTYy","number":2769,"title":"Allow PyArrow from 
source","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T14:26:44Z","updated_at":"2021-08-09T15:38:39Z","closed_at":"2021-08-09T15:38:39Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2769","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769.patch"},"body":"When installing pyarrow from source the version is:\r\n\r\n```python\r\n>>> import pyarrow; pyarrow.__version__\r\n'2.1.0.dev612'\r\n```\r\n\r\n-> however this breaks the install check at init of `datasets`. 
This PR makes sure that everything coming after the last `'.'` is removed.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2768","id":963229173,"node_id":"MDU6SXNzdWU5NjMyMjkxNzM=","number":2768,"title":"`ArrowInvalid: Added column's length must match table's length.` after using `select`","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-07T13:17:29Z","updated_at":"2021-08-09T11:26:43Z","closed_at":"2021-08-09T11:26:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI would like to add a column to a downsampled dataset. However I get an error message saying the length don't match with the length of the unsampled dataset indicated. I suspect that the dataset size is not updated when calling `select`.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"tweets_hate_speech_detection\")['train'].select(range(128))\r\nds = ds.add_column('ones', [1]*128)\r\n```\r\n\r\n## Expected results\r\nI would expect a new column named `ones` filled with `1`. When I check the length of `ds` it says `128`. 
Interestingly, it works when calling `ds = ds.map(lambda x: x)` before adding the column.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n```python\r\n---------------------------------------------------------------------------\r\nArrowInvalid Traceback (most recent call last)\r\n\/var\/folders\/l4\/2905jygx4tx5jv8_kn03vxsw0000gn\/T\/ipykernel_6301\/868709636.py in \r\n 1 from datasets import load_dataset\r\n 2 ds = load_dataset(\"tweets_hate_speech_detection\")['train'].select(range(128))\r\n----> 3 ds = ds.add_column('ones', [0]*128)\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 183 }\r\n 184 # apply actual function\r\n--> 185 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 186 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 187 # re-apply format to the output\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 395 # Call actual function\r\n 396 \r\n--> 397 out = func(self, *args, **kwargs)\r\n 398 \r\n 399 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in add_column(self, name, column, new_fingerprint)\r\n 2965 column_table = InMemoryTable.from_pydict({name: column})\r\n 2966 # Concatenate tables horizontally\r\n-> 2967 table = ConcatenationTable.from_tables([self._data, column_table], axis=1)\r\n 2968 # Update features\r\n 2969 info = self.info.copy()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in from_tables(cls, tables, axis)\r\n 715 table_blocks = to_blocks(table)\r\n 716 blocks = _extend_blocks(blocks, table_blocks, axis=axis)\r\n--> 717 return cls.from_blocks(blocks)\r\n 718 \r\n 719 @property\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in from_blocks(cls, blocks)\r\n 663 return cls(table, blocks)\r\n 664 else:\r\n--> 665 table = cls._concat_blocks_horizontally_and_vertically(blocks)\r\n 666 return cls(table, blocks)\r\n 667 \r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in _concat_blocks_horizontally_and_vertically(cls, blocks)\r\n 623 if not tables:\r\n 624 continue\r\n--> 625 pa_table_horizontally_concatenated = cls._concat_blocks(tables, axis=1)\r\n 626 pa_tables_to_concat_vertically.append(pa_table_horizontally_concatenated)\r\n 627 return cls._concat_blocks(pa_tables_to_concat_vertically, axis=0)\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in _concat_blocks(blocks, axis)\r\n 612 else:\r\n 613 for name, col in zip(table.column_names, table.columns):\r\n--> 614 pa_table = pa_table.append_column(name, col)\r\n 615 return pa_table\r\n 616 else:\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.append_column()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.add_column()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Added column's length must match table's length. 
Expected length 31962 but got length 128\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 5.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2767","id":963002120,"node_id":"MDU6SXNzdWU5NjMwMDIxMjA=","number":2767,"title":"equal operation to perform unbatch for huggingface datasets ","user":{"login":"dorooddorood606","id":79288051,"node_id":"MDQ6VXNlcjc5Mjg4MDUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79288051?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorooddorood606","html_url":"https:\/\/github.com\/dorooddorood606","followers_url":"https:\/\/api.github.com\/users\/dorooddorood606\/followers","following_url":"https:\/\/api.github.com\/users\/dorooddorood606\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorooddorood606\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorooddorood606\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorooddorood606\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorooddorood606\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorooddorood606\/repos","events_url":"https:\/\/api.github.com\/users\/dorooddorood606\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorooddorood606\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-08-06T19:45:52Z","updated_at":"2021-08-07T19:56:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI need to use \"unbatch\" operation in tensorflow on a huggingface dataset, I could not find this operation, could you kindly direct me how I can do it, here is the problem I am trying to solve:\r\n\r\nI am considering \"record\" dataset in SuperGlue and I need to replicate each entery of the dataset for each answer, to make it similar to what T5 originally did:\r\n\r\nhttps:\/\/github.com\/google-research\/text-to-text-transfer-transformer\/blob\/3c58859b8fe72c2dbca6a43bc775aa510ba7e706\/t5\/data\/preprocessors.py#L925\r\n\r\nHere please find an example:\r\n\r\n For example, a typical example from ReCoRD might look like\r\n {\r\n 'passsage': 'This is the passage.',\r\n 'query': 'A @placeholder is a bird.',\r\n 'entities': ['penguin', 'potato', 'pigeon'],\r\n 'answers': ['penguin', 'pigeon'],\r\n }\r\n and I need a prosessor which would turn this example into the following two examples:\r\n {\r\n 'inputs': 'record query: A @placeholder is a bird. 
entities: penguin, '\r\n 'potato, pigeon passage: This is the passage.',\r\n 'targets': 'penguin',\r\n }\r\n and\r\n {\r\n 'inputs': 'record query: A @placeholder is a bird. entities: penguin, '\r\n 'potato, pigeon passage: This is the passage.',\r\n 'targets': 'pigeon',\r\n }\r\n\r\n\r\nFor doing this, one need unbatch, as each entry can map to multiple samples depending on the number of answers, I am not sure how to perform this operation with huggingface datasets library and greatly appreciate your help\r\n\r\n@lhoestq \r\n\r\nThank you very much.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766","id":962994198,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1NzAyNjM5","number":2766,"title":"fix typo (ShuffingConfig -> ShufflingConfig)","user":{"login":"daleevans","id":4944007,"node_id":"MDQ6VXNlcjQ5NDQwMDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944007?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/daleevans","html_url":"https:\/\/github.com\/daleevans","followers_url":"https:\/\/api.github.com\/users\/daleevans\/followers","following_url":"https:\/\/api.github.com\/users\/daleevans\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/daleevans\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/daleevans\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/daleevans\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/daleevans\/orgs","repos_url":"https:\/\/api.github.com\/users\/daleevans\/repos","events_url":"https:\/\/api.github.com\/users\/daleevans\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/daleevans\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-06T19:31:40Z","updated_at":"2021-08-10T14:17:03Z","closed_at":"2021-08-10T14:17:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2766","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766.patch"},"body":"pretty straightforward, it should be Shuffling instead of Shuffing","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2765","id":962861395,"node_id":"MDU6SXNzdWU5NjI4NjEzOTU=","number":2765,"title":"BERTScore 
Error","user":{"login":"gagan3012","id":49101362,"node_id":"MDQ6VXNlcjQ5MTAxMzYy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49101362?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gagan3012","html_url":"https:\/\/github.com\/gagan3012","followers_url":"https:\/\/api.github.com\/users\/gagan3012\/followers","following_url":"https:\/\/api.github.com\/users\/gagan3012\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gagan3012\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gagan3012\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gagan3012\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gagan3012\/orgs","repos_url":"https:\/\/api.github.com\/users\/gagan3012\/repos","events_url":"https:\/\/api.github.com\/users\/gagan3012\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gagan3012\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-06T15:58:57Z","updated_at":"2021-08-09T11:16:25Z","closed_at":"2021-08-09T11:16:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\npredictions = [\"hello there\", \"general kenobi\"]\r\nreferences = [\"hello there\", \"general kenobi\"]\r\nbert = load_metric('bertscore')\r\nbert.compute(predictions=predictions, references=references,lang='en')\r\n```\r\n\r\n# Bug\r\n`TypeError: get_hash() missing 1 required positional argument: 'use_fast_tokenizer'`\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Colab \r\n- Python version:\r\n- PyArrow version:\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764","id":962554799,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1MzI3MDQ5","number":2764,"title":"Add DER metric for SUPERB speaker diarization 
task","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-06T09:12:36Z","updated_at":"2021-08-06T10:06:53Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2764","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764.patch"},"body":null,"performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2763","id":961895523,"node_id":"MDU6SXNzdWU5NjE4OTU1MjM=","number":2763,"title":"English wikipedia datasets is not clean","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-05T14:37:24Z","updated_at":"2021-08-05T14:37:41Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWikipedia english dumps contain many wikipedia paragraphs like \"References\", \"Category:\" and \"See Also\" that should not be used for training.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import load_dataset\r\nw = load_dataset('wikipedia', '20200501.en')\r\nprint(w['train'][0]['text'])\r\n```\r\n\r\n> 'Yangliuqing () is a market town in Xiqing District, in the western suburbs of Tianjin, People\\'s Republic of China. Despite its relatively small size, it has been named since 2006 in the \"famous historical and cultural market towns in China\".\\n\\nIt is best known in China for creating nianhua or Yangliuqing nianhua. For more than 400 years, Yangliuqing has in effect specialised in the creation of these woodcuts for the New Year. wood block prints using vivid colourschemes to portray traditional scenes of children\\'s games often interwoven with auspiciouse objects.\\n\\n, it had 27 residential communities () and 25 villages under its administration.\\n\\nShi Family Grand Courtyard\\n\\nShi Family Grand Courtyard (Ti\u0101nj\u012bn Sh\u00ed Ji\u0101 D\u00e0 Yu\u00e0n, \u5929\u6d25\u77f3\u5bb6\u5927\u9662) is situated in Yangliuqing Town of Xiqing District, which is the former residence of wealthy merchant Shi Yuanshi - the 4th son of Shi Wancheng, one of the eight great masters in Tianjin. First built in 1875, it covers over 6,000 square meters, including large and small yards and over 200 folk houses, a theater and over 275 rooms that served as apartments and places of business and worship for this powerful family. Shifu Garden, which finished its expansion in October 2003, covers 1,200 square meters, incorporates the elegance of imperial garden and delicacy of south garden. Now the courtyard of Shi family covers about 10,000 square meters, which is called the first mansion in North China. Now it serves as the folk custom museum in Yangliuqing, which has a large collection of folk custom museum in Yanliuqing, which has a large collection of folk art pieces like Yanliuqing New Year pictures, brick sculpture.\\n\\nShi\\'s ancestor came from Dong\\'e County in Shandong Province, engaged in water transport of grain. As the wealth gradually accumulated, the Shi Family moved to Yangliuqing and bought large tracts of land and set up their residence. Shi Yuanshi came from the fourth generation of the family, who was a successful businessman and a good household manager, and the residence was thus enlarged for several times until it acquired the present scale. It is believed to be the first mansion in the west of Tianjin.\\n\\nThe residence is symmetric based on the axis formed by a passageway in the middle, on which there are four archways. On the east side of the courtyard, there are traditional single-story houses with rows of rooms around the four sides, which was once the living area for the Shi Family. The rooms on north side were the accountants\\' office. On the west are the major constructions including the family hall for worshipping Buddha, theater and the south reception room. 
On both sides of the residence are side yard rooms for maids and servants.\\n\\nToday, the Shi mansion, located in the township of Yangliuqing to the west of central Tianjin, stands as a surprisingly well-preserved monument to China\\'s pre-revolution mercantile spirit. It also serves as an on-location shoot for many of China\\'s popular historical dramas. Many of the rooms feature period furniture, paintings and calligraphy, and the extensive Shifu Garden.\\n\\nPart of the complex has been turned into the Yangliuqing Museum, which includes displays focused on symbolic aspects of the courtyards\\' construction, local folk art and customs, and traditional period furnishings and crafts.\\n\\n**See also \\n\\nList of township-level divisions of Tianjin\\n\\nReferences \\n\\n http:\/\/arts.cultural-china.com\/en\/65Arts4795.html\\n\\nCategory:Towns in Tianjin'**\r\n\r\n## Expected results\r\nI expect no junk in the data.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2762","id":961652046,"node_id":"MDU6SXNzdWU5NjE2NTIwNDY=","number":2762,"title":"Add RVL-CDIP dataset","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-05T09:57:05Z","updated_at":"2021-08-05T09:57:22Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** RVL-CDIP\r\n- **Description:** The RVL-CDIP (Ryerson Vision Lab Complex Document Information Processing) dataset consists of 400,000 grayscale images in 16 classes, with 25,000 images per 
class. There are 320,000 training images, 40,000 validation images, and 40,000 test images. The images are sized so their largest dimension does not exceed 1000 pixels.\r\n- **Paper:** https:\/\/www.cs.cmu.edu\/~aharley\/icdar15\/\r\n- **Data:** https:\/\/www.cs.cmu.edu\/~aharley\/rvl-cdip\/\r\n- **Motivation:** I'm currently adding LayoutLMv2 and LayoutXLM to HuggingFace Transformers. LayoutLM (v1) already exists in the library. This dataset has a large value for document image classification (i.e. classifying scanned documents). LayoutLM models obtain SOTA on this dataset, so would be great to directly use it in notebooks.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2761","id":961568287,"node_id":"MDU6SXNzdWU5NjE1NjgyODc=","number":2761,"title":"Error loading C4 realnewslike dataset","user":{"login":"danshirron","id":32061512,"node_id":"MDQ6VXNlcjMyMDYxNTEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32061512?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danshirron","html_url":"https:\/\/github.com\/danshirron","followers_url":"https:\/\/api.github.com\/users\/danshirron\/followers","following_url":"https:\/\/api.github.com\/users\/danshirron\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danshirron\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danshirron\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danshirron\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danshirron\/orgs","repos_url":"https:\/\/api.github.com\/users\/danshirron\/repos","events_url":"https:\/\/api.github.com\/users\/danshirron\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danshirron\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-08-05T08:16:58Z","updated_at":"2021-08-08T19:44:34Z","closed_at":"2021-08-08T19:44:34Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nError loading C4 realnewslike dataset. 
Validation part mismatch\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n raw_datasets = load_dataset('c4', 'realnewslike', cache_dir=model_args.cache_dir)\r\n## Expected results\r\nsuccess on data loading\r\n## Actual results\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 15.3M\/15.3M [00:00<00:00, 28.1MB\/s]Traceback (most recent call last): \r\n File \"run_mlm_tf.py\", line 794, in \r\n main() \r\n File \"run_mlm_tf.py\", line 425, in main \r\n raw_datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name, cache_dir=model_args.cache_dir) File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 843, in load_dataset \r\n builder_instance.download_and_prepare( \r\n File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 608, in download_and_prepare \r\n self._download_and_prepare( \r\n File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 698, in _download_and_prepare verify_splits(self.info.splits, split_dict) File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 74, in verify_splits \r\n raise NonMatchingSplitsSizesError(str(bad_splits)) \r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='validation', num_bytes=38165657946, num_examples=13799838, dataset_name='c4'), 'recorded': SplitInfo(name='validation', num_bytes=37875873, num_examples=13863, dataset_name='c4')}] \r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: Linux-5.4.0-58-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2760","id":961372667,"node_id":"MDU6SXNzdWU5NjEzNzI2Njc=","number":2760,"title":"Add Nuswide dataset","user":{"login":"shivangibithel","id":19774925,"node_id":"MDQ6VXNlcjE5Nzc0OTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19774925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shivangibithel","html_url":"https:\/\/github.com\/shivangibithel","followers_url":"https:\/\/api.github.com\/users\/shivangibithel\/followers","following_url":"https:\/\/api.github.com\/users\/shivangibithel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shivangibithel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shivangibithel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shivangibithel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shivangibithel\/orgs","repos_url":"https:\/\/api.github.com\/users\/shivangibithel\/repos","events_url":"https:\/\/api.github.com\/users\/shivangibithel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shivangibithel\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-05T03:00:41Z","updated_at":"2021-08-05T03:00:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *NUSWIDE*\r\n- **Description:** *[A Real-World Web Image Dataset from National University of Singapore](https:\/\/lms.comp.nus.edu.sg\/wp-content\/uploads\/2019\/research\/nuswide\/NUS-WIDE.html)*\r\n- **Paper:** *[here](https:\/\/lms.comp.nus.edu.sg\/wp-content\/uploads\/2019\/research\/nuswide\/nuswide-civr2009.pdf)*\r\n- **Data:** *[here](https:\/\/github.com\/wenting-zhao\/nuswide)*\r\n- **Motivation:** *This dataset is a benchmark in the Text Retrieval task.*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2759","id":960636572,"node_id":"MDU6SXNzdWU5NjA2MzY1NzI=","number":2759,"title":"the meteor metric seems not consist with the official 
version","user":{"login":"jianguda","id":9079360,"node_id":"MDQ6VXNlcjkwNzkzNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9079360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jianguda","html_url":"https:\/\/github.com\/jianguda","followers_url":"https:\/\/api.github.com\/users\/jianguda\/followers","following_url":"https:\/\/api.github.com\/users\/jianguda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jianguda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jianguda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jianguda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jianguda\/orgs","repos_url":"https:\/\/api.github.com\/users\/jianguda\/repos","events_url":"https:\/\/api.github.com\/users\/jianguda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jianguda\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-04T15:33:17Z","updated_at":"2021-08-04T17:18:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe computed meteor score seems strange because the value is very different from the scores computed by other tools. For example, I use the meteor score computed by [NLGeval](https:\/\/github.com\/Maluuba\/nlg-eval) as the reference (which reuses the official jar file for the computation)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_metric\r\nfrom nlgeval import NLGEval, compute_individual_metrics\r\n\r\nmeteor = load_metric('meteor')\r\npredictions = [\"It is a guide to action which ensures that the military always obeys the commands of the party\"]\r\nreferences = [\"It is a guide to action that ensures that the military will forever heed Party commands\"]\r\nresults = meteor.compute(predictions=predictions, references=references)\r\n# print the actual result\r\nprint(round(results[\"meteor\"], 4))\r\nmetrics_dict = compute_individual_metrics(references, predictions[0])\r\n# print the expected result\r\nprint(round(metrics_dict[\"METEOR\"], 4))\r\n```\r\nBy the way, you need to install the `nlg-eval` library first. 
Please check the installation guide [here](https:\/\/github.com\/Maluuba\/nlg-eval#setup), thanks!\r\n\r\n## Expected results\r\n`0.4474`\r\n\r\n## Actual results\r\n`0.7398`\r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758","id":960206575,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAzMjQ5Nzky","number":2758,"title":"Raise ManualDownloadError when loading a dataset that requires previous manual download","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-04T10:19:55Z","updated_at":"2021-08-04T11:36:30Z","closed_at":"2021-08-04T11:36:30Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2758","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758.patch"},"body":"This PR implements the raising of a `ManualDownloadError` when loading a dataset that requires previous manual download, and this is missing.\r\n\r\nThe `ManualDownloadError` is raised whether the dataset is loaded in normal or streaming mode.\r\n\r\nClose #2749.\r\n\r\ncc: @severo ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2757","id":959984081,"node_id":"MDU6SXNzdWU5NTk5ODQwODE=","number":2757,"title":"Unexpected type after `concatenate_datasets`","user":{"login":"JulesBelveze","id":32683010,"node_id":"MDQ6VXNlcjMyNjgzMDEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32683010?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JulesBelveze","html_url":"https:\/\/github.com\/JulesBelveze","followers_url":"https:\/\/api.github.com\/users\/JulesBelveze\/followers","following_url":"https:\/\/api.github.com\/users\/JulesBelveze\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JulesBelveze\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JulesBelveze\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JulesBelveze\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JulesBelveze\/orgs","repos_url":"https:\/\/api.github.com\/users\/JulesBelveze\/repos","events_url":"https:\/\/api.github.com\/users\/JulesBelveze\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JulesBelveze\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-04T07:10:39Z","updated_at":"2021-08-04T16:01:24Z","closed_at":"2021-08-04T16:01:23Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to concatenate two `Dataset` using `concatenate_datasets` but it turns out that after concatenation the features are casted from `torch.Tensor` to `list`. \r\nIt then leads to a weird tensors when trying to convert it to a `DataLoader`. However, if I use each `Dataset` separately everything behave as expected.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> featurized_teacher\r\nDataset({\r\n features: ['t_labels', 't_input_ids', 't_token_type_ids', 't_attention_mask'],\r\n num_rows: 502\r\n})\r\n>>> for f in featurized_teacher.features:\r\n print(featurized_teacher[f].shape)\r\ntorch.Size([502])\r\ntorch.Size([502, 300])\r\ntorch.Size([502, 300])\r\ntorch.Size([502, 300])\r\n\r\n>>> featurized_student\r\nDataset({\r\n features: ['s_features', 's_labels'],\r\n num_rows: 502\r\n})\r\n>>> for f in featurized_student.features:\r\n print(featurized_student[f].shape)\r\ntorch.Size([502, 64])\r\ntorch.Size([502])\r\n```\r\nThe shapes seem alright to me. 
Then the results after concatenation are as follow:\r\n```python\r\n>>> concat_dataset = datasets.concatenate_datasets([featurized_student, featurized_teacher], axis=1)\r\n>>> type(concat_dataset[\"t_labels\"])\r\n\r\n```\r\nOne would expect to obtain the same type as the one before concatenation.\r\n\r\nAm I doing something wrong here? Any idea on how to fix this unexpected behavior?\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.0\r\n- Platform: macOS-10.14.6-x86_64-i386-64bit\r\n- Python version: 3.9.5\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756","id":959255646,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMzk4Mjk1","number":2756,"title":"Fix metadata JSON for ubuntu_dialogs_corpus dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T15:48:59Z","updated_at":"2021-08-04T09:43:25Z","closed_at":"2021-08-04T09:43:25Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2756","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755","id":959115888,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjgwMjI4","number":2755,"title":"Fix metadata JSON for 
turkish_movie_sentiment dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T13:25:44Z","updated_at":"2021-08-04T09:06:54Z","closed_at":"2021-08-04T09:06:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2755","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754","id":959105577,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjcxMjM4","number":2754,"title":"Generate metadata JSON for telugu_books 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T13:14:52Z","updated_at":"2021-08-04T08:49:02Z","closed_at":"2021-08-04T08:49:02Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2754","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753","id":959036995,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjEyMjMz","number":2753,"title":"Generate metadata JSON for reclor 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:52:29Z","updated_at":"2021-08-04T08:07:15Z","closed_at":"2021-08-04T08:07:15Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2753","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752","id":959023608,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjAxMjAy","number":2752,"title":"Generate metadata JSON for lm1b 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:34:56Z","updated_at":"2021-08-04T06:40:40Z","closed_at":"2021-08-04T06:40:39Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2752","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751","id":959021262,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMTk5MjA5","number":2751,"title":"Update metadata for wikihow 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:31:57Z","updated_at":"2021-08-03T15:52:09Z","closed_at":"2021-08-03T15:52:09Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2751","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751.patch"},"body":"Update metadata for wikihow dataset:\r\n- Remove leading new line character in description and citation\r\n- Update metadata JSON\r\n- Remove no longer necessary `urls_checksums\/checksums.txt` file\r\n\r\nRelated to #2748.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2750","id":958984730,"node_id":"MDU6SXNzdWU5NTg5ODQ3MzA=","number":2750,"title":"Second concatenation of datasets produces 
errors","user":{"login":"Aktsvigun","id":36672861,"node_id":"MDQ6VXNlcjM2NjcyODYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36672861?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Aktsvigun","html_url":"https:\/\/github.com\/Aktsvigun","followers_url":"https:\/\/api.github.com\/users\/Aktsvigun\/followers","following_url":"https:\/\/api.github.com\/users\/Aktsvigun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Aktsvigun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Aktsvigun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Aktsvigun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Aktsvigun\/orgs","repos_url":"https:\/\/api.github.com\/users\/Aktsvigun\/repos","events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-08-03T10:47:04Z","updated_at":"2021-08-10T11:42:07Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"p
ull_request":null,"body":"Hi,\r\n\r\nI am need to concatenate my dataset with others several times, and after I concatenate it for the second time, the features of features (e.g. tags names) are collapsed. This hinders, for instance, the usage of tokenize function with `data.map`.\r\n\r\n```\r\nfrom datasets import load_dataset, concatenate_datasets\r\n\r\ndata = load_dataset('trec')['train']\r\nconcatenated = concatenate_datasets([data, data])\r\nconcatenated_2 = concatenate_datasets([concatenated, concatenated])\r\nprint('True features of features:', concatenated.features)\r\nprint('\\nProduced features of features:', concatenated_2.features)\r\n```\r\noutputs \r\n\r\n```\r\nTrue features of features: {'label-coarse': ClassLabel(num_classes=6, names=['DESC', 'ENTY', 'ABBR', 'HUM', 'NUM', 'LOC'], names_file=None, id=None), 'label-fine': ClassLabel(num_classes=47, names=['manner', 'cremat', 'animal', 'exp', 'ind', 'gr', 'title', 'def', 'date', 'reason', 'event', 'state', 'desc', 'count', 'other', 'letter', 'religion', 'food', 'country', 'color', 'termeq', 'city', 'body', 'dismed', 'mount', 'money', 'product', 'period', 'substance', 'sport', 'plant', 'techmeth', 'volsize', 'instru', 'abb', 'speed', 'word', 'lang', 'perc', 'code', 'dist', 'temp', 'symbol', 'ord', 'veh', 'weight', 'currency'], names_file=None, id=None), 'text': Value(dtype='string', id=None)}\r\n\r\nProduced features of features: {'label-coarse': Value(dtype='int64', id=None), 'label-fine': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None)}\r\n```\r\n\r\nI am using `datasets` v.1.11.0","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2749","id":958968748,"node_id":"MDU6SXNzdWU5NTg5Njg3NDg=","number":2749,"title":"Raise a proper exception when trying to stream a dataset that requires to manually download files","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-03T10:26:27Z","updated_at":"2021-08-09T08:53:35Z","closed_at":"2021-08-04T11:36:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAt least for 'reclor', 'telugu_books', 'turkish_movie_sentiment', 'ubuntu_dialogs_corpus', 'wikihow', trying to `load_dataset` in streaming mode raises a `TypeError` without any detail about why it fails.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"reclor\", streaming=True)\r\n```\r\n\r\n## Expected results\r\n\r\nIdeally: raise a specific exception, something like `ManualDownloadError`.\r\n\r\nOr at least give the reason in the message, as when we load in normal mode:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"reclor\")\r\n```\r\n\r\n```\r\nAssertionError: The dataset reclor with config default requires manual data.\r\n Please follow the manual download instructions: to use ReClor you need to download it manually. 
Please go to its homepage (http:\/\/whyu.me\/reclor\/) fill the google\r\n form and you will receive a download link and a password to extract it.Please extract all files in one folder and use the path folder in datasets.load_dataset('reclor', data_dir='path\/to\/folder\/folder_name')\r\n .\r\n Manual data can be loaded with `datasets.load_dataset(reclor, data_dir='')\r\n```\r\n\r\n## Actual results\r\n\r\n```\r\nTypeError: expected str, bytes or os.PathLike object, not NoneType\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS-11.5-x86_64-i386-64bit\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748","id":958889041,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMDg4NTk4","number":2748,"title":"Generate metadata JSON for wikihow dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T08:55:40Z","updated_at":"2021-08-03T10:17:51Z","closed_at":"2021-08-03T10:17:51Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2748","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748.patch"},"body":"Related to #2743.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747","id":958867627,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMDcwOTgy","number":2747,"title":"add multi-proc in `to_json`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-08-03T08:30:13Z","updated_at":"2021-08-10T20:16:44Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2747","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747.patch"},"body":"Closes #2663. I've tried adding multiprocessing in `to_json`. Here's some benchmarking I did to compare the timings of current version (say v1) and multi-proc version (say v2). I did this with `cpu_count` 4 (2015 Macbook Air)\r\n\r\n1. Dataset name: `ascent_kb` - 8.9M samples (all samples were used, reporting this for a single run)\r\nv1- ~225 seconds for converting whole dataset to json\r\nv2- ~200 seconds for converting whole dataset to json\r\n\r\n2. Dataset name: `lama` - 1.3M samples (all samples were used, reporting this for 2 runs)\r\nv1- ~26 seconds for converting whole dataset to json\r\nv2- ~23.6 seconds for converting whole dataset to json\r\n\r\nI think it's safe to say that v2 is 10% faster as compared to v1. Timings may improve further with better configuration.\r\n\r\nThe only bottleneck I feel is writing to file from the output list. If we can improve that aspect then timings may improve further. \r\n\r\nLet me know if any changes\/improvements can be done in this @stas00, @lhoestq, @albertvillanova. 
@lhoestq even suggested to extend this work with other export methods as well like `csv` or `parquet`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2746","id":958551619,"node_id":"MDU6SXNzdWU5NTg1NTE2MTk=","number":2746,"title":"Cannot load `few-nerd` dataset","user":{"login":"Mehrad0711","id":28717374,"node_id":"MDQ6VXNlcjI4NzE3Mzc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28717374?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehrad0711","html_url":"https:\/\/github.com\/Mehrad0711","followers_url":"https:\/\/api.github.com\/users\/Mehrad0711\/followers","following_url":"https:\/\/api.github.com\/users\/Mehrad0711\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehrad0711\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehrad0711\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehrad0711\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehrad0711\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehrad0711\/repos","events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-02T22:18:57Z","updated_at":"2021-08-03T19:45:44Z","closed_at":"2021-08-03T19:45:43Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nCannot load `few-nerd` dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset('few-nerd', 'supervised')\r\n```\r\n\r\n## Actual results\r\n\r\nExecuting above code will give the following error:\r\n\r\n```\r\nUsing the latest cached version of the module from \/Users\/Mehrad\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/few-nerd\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53 (last modified on Wed Jun 2 11:34:25 2021) since it couldn't be found locally at \/Users\/Mehrad\/Documents\/GitHub\/genienlp\/few-nerd\/few-nerd.py, or remotely (FileNotFoundError).\r\nDownloading and preparing dataset few_nerd\/supervised (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/Users\/Mehrad\/.cache\/huggingface\/datasets\/few_nerd\/supervised\/0.0.0\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53...\r\nTraceback (most recent call last):\r\n File \"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 693, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File 
\"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1107, in _prepare_split\r\n disable=bool(logging.get_verbosity() == logging.NOTSET),\r\n File \"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"\/Users\/Mehrad\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/few-nerd\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53\/few-nerd.py\", line 196, in _generate_examples\r\n with open(filepath, encoding=\"utf-8\") as f:\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/Users\/Mehrad\/.cache\/huggingface\/datasets\/downloads\/supervised\/train.json'\r\n```\r\nThe bug is probably in identifying and downloading the dataset. If I download the json splits directly from [link](https:\/\/github.com\/nbroad1881\/few-nerd\/tree\/main\/uncompressed) and put them under the downloads directory, they will be processed into arrow format correctly. \r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Python version: 3.8\r\n- PyArrow version: 1.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745","id":958269579,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxNTc0Mjcz","number":2745,"title":"added semeval18_emotion_classification dataset","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-02T15:39:55Z","updated_at":"2021-08-06T13:02:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2745","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745.patch"},"body":"I added the data set of SemEval 2018 Task 1 (Subtask 5) for emotion detection in three languages.\r\n\r\n```\r\ndatasets-cli test datasets\/semeval18_emotion_classification\/ --save_infos --all_configs\r\n\r\nRUN_SLOW=1 pytest 
tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_semeval18_emotion_classification\r\n```\r\nBoth commands ran successfully.\r\n\r\nI couldn't create the dummy data (the files are tsvs but have .txt ending, maybe that's the problem?) and therefore the test on the dummy data fails, maybe someone can help here.\r\n\r\nI also formatted the code:\r\n```\r\nblack --line-length 119 --target-version py36 datasets\/semeval18_emotion_classification\/\r\nisort datasets\/semeval18_emotion_classification\/\r\nflake8 datasets\/semeval18_emotion_classification\/\r\n```\r\nThat's the publication for reference:\r\n\r\nMohammad, S., Bravo-Marquez, F., Salameh, M., & Kiritchenko, S. (2018). SemEval-2018 task 1: Affect in tweets. Proceedings of the 12th International Workshop on Semantic Evaluation, 1\u201317. https:\/\/doi.org\/10.18653\/v1\/S18-1001","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744","id":958146637,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxNDY4NDcz","number":2744,"title":"Fix key by recreating metadata JSON for journalists_questions dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T13:27:53Z","updated_at":"2021-08-03T09:25:34Z","closed_at":"2021-08-03T09:25:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2744","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744.patch"},"body":"Close #2743.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2743","id":958119251,"node_id":"MDU6SXNzdWU5NTgxMTkyNTE=","number":2743,"title":"Dataset JSON is incorrect","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-02T13:01:26Z","updated_at":"2021-08-03T10:06:57Z","closed_at":"2021-08-03T09:25:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe JSON file generated for https:\/\/github.com\/huggingface\/datasets\/blob\/573f3d35081cee239d1b962878206e9abe6cde91\/datasets\/journalists_questions\/journalists_questions.py is https:\/\/github.com\/huggingface\/datasets\/blob\/573f3d35081cee239d1b962878206e9abe6cde91\/datasets\/journalists_questions\/dataset_infos.json.\r\n\r\nThe only config should be `plain_text`, but the first key in the JSON is `journalists_questions` (the dataset id) instead.\r\n\r\n```json\r\n{\r\n \"journalists_questions\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by journalists.\\n\",\r\n ...\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nLook at the files.\r\n\r\n## Expected results\r\n\r\nThe first key should be `plain_text`:\r\n\r\n```json\r\n{\r\n \"plain_text\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by 
journalists.\\n\",\r\n ...\r\n```\r\n\r\n## Actual results\r\n\r\n```json\r\n{\r\n \"journalists_questions\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by journalists.\\n\",\r\n ...\r\n```\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2742","id":958114064,"node_id":"MDU6SXNzdWU5NTgxMTQwNjQ=","number":2742,"title":"Improve detection of streamable file types","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-02T12:55:09Z","updated_at":"2021-08-02T16:35:49Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\n```python\r\nfrom datasets import load_dataset_builder\r\nfrom datasets.utils.streaming_download_manager import StreamingDownloadManager\r\nbuilder = load_dataset_builder(\"journalists_questions\", name=\"plain_text\")\r\nbuilder._split_generators(StreamingDownloadManager(base_path=builder.base_path))\r\n```\r\n\r\nraises\r\n\r\n```\r\nNotImplementedError: Extraction protocol for file at https:\/\/drive.google.com\/uc?export=download&id=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U is not implemented yet\r\n```\r\n\r\nBut the file at https:\/\/drive.google.com\/uc?export=download&id=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U is a text file and it can be streamed:\r\n\r\n```bash\r\ncurl --header \"Range: bytes=0-100\" -L https:\/\/drive.google.com\/uc\\?export\\=download\\&id\\=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U\r\n506938088174940160 yes 1\r\n302221719412830209 yes 1\r\n289761704907268096 yes 1\r\n513820885032378369 yes %\r\n```\r\n\r\nYet, it's wrongly categorized as a file type that cannot be streamed because the test is currently based on 1. the presence of a file extension at the end of the URL (here: no extension), and 2. the inclusion of this extension in a list of supported formats.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIn the case of an URL (instead of a local path), ask for the MIME type, and decide on that value? Note that it would not work in that case, because the value of `content_type` is `text\/html; charset=UTF-8`.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nAdd a variable in the dataset script to set the data format by hand.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2741","id":957979559,"node_id":"MDU6SXNzdWU5NTc5Nzk1NTk=","number":2741,"title":"Add Hypersim dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T10:06:50Z","updated_at":"2021-08-02T10:06:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Hypersim\r\n- **Description:** photorealistic synthetic dataset for holistic indoor scene understanding\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/github.com\/apple\/ml-hypersim\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740","id":957911035,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxMjY0NTI3","number":2740,"title":"Update release instructions","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T08:46:00Z","updated_at":"2021-08-02T14:39:56Z","closed_at":"2021-08-02T14:39:56Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2740","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740.patch"},"body":"Update release instructions.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739","id":957751260,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxMTI0ODQ3","number":2739,"title":"Pass tokenize to sacrebleu only if explicitly passed by user","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T05:09:05Z","updated_at":"2021-08-03T04:23:37Z","closed_at":"2021-08-03T04:23:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2739","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739.patch"},"body":"Next `sacrebleu` release (v2.0.0) will remove `sacrebleu.DEFAULT_TOKENIZER`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR passes `tokenize` to `sacrebleu` only if explicitly passed by the user, otherwise it will not pass it (and `sacrebleu` will use its default, no matter where it is and how it is called).\r\n\r\nClose: #2737.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738","id":957517746,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwOTI5NzA4","number":2738,"title":"Sunbird AI Ugandan low resource language 
dataset","user":{"login":"ak3ra","id":12105163,"node_id":"MDQ6VXNlcjEyMTA1MTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12105163?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ak3ra","html_url":"https:\/\/github.com\/ak3ra","followers_url":"https:\/\/api.github.com\/users\/ak3ra\/followers","following_url":"https:\/\/api.github.com\/users\/ak3ra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ak3ra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ak3ra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ak3ra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ak3ra\/orgs","repos_url":"https:\/\/api.github.com\/users\/ak3ra\/repos","events_url":"https:\/\/api.github.com\/users\/ak3ra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ak3ra\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-01T15:18:00Z","updated_at":"2021-08-02T01:37:56Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2738","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738.patch"},"body":"Multi-way parallel text corpus of 5 key Ugandan languages for the task of machine translation. ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2737","id":957124881,"node_id":"MDU6SXNzdWU5NTcxMjQ4ODE=","number":2737,"title":"SacreBLEU update","user":{"login":"devrimcavusoglu","id":46989091,"node_id":"MDQ6VXNlcjQ2OTg5MDkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46989091?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/devrimcavusoglu","html_url":"https:\/\/github.com\/devrimcavusoglu","followers_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/followers","following_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/repos","events_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-30T23:53:08Z","updated_at":"2021-08-03T04:23:37Z","closed_at":"2021-08-03T04:23:37Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"With the latest release of [sacrebleu](https:\/\/github.com\/mjpost\/sacrebleu), `datasets.metrics.sacrebleu` is broken, and getting error.\r\n\r\n AttributeError: module 'sacrebleu' has no attribute 'DEFAULT_TOKENIZER'\r\n\r\nthis happens since in new version of sacrebleu there is no `DEFAULT_TOKENIZER`, but sacrebleu.py tries to import it anyways. This can be fixed currently with fixing `sacrebleu==1.5.0`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nsacrebleu= datasets.load_metric('sacrebleu')\r\npredictions = [\"It is a guide to action which ensures that the military always obeys the commands of the party\"]\r\nreferences = [\"It is a guide to action that ensures that the military will forever heed Party commands\"]\r\nresults = sacrebleu.compute(predictions=predictions, references=references)\r\nprint(results)\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: Python 3.8.0\r\n- PyArrow version: 5.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2736","id":956895199,"node_id":"MDU6SXNzdWU5NTY4OTUxOTk=","number":2736,"title":"Add Microsoft Building Footprints dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-30T16:17:08Z","updated_at":"2021-07-31T05:02:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## 
Adding a Dataset\r\n- **Name:** Microsoft Building Footprints\r\n- **Description:** With the goal to increase the coverage of building footprint data available as open data for OpenStreetMap and humanitarian efforts, we have released millions of building footprints as open data available to download free of charge.\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/www.microsoft.com\/en-us\/maps\/building-footprints\r\n- **Motivation:** this can be a useful dataset for researchers working on climate change adaptation, urban studies, geography, etc.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nReported by: @sashavor","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2735","id":956889365,"node_id":"MDU6SXNzdWU5NTY4ODkzNjU=","number":2735,"title":"Add Open Buildings dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T16:08:39Z","updated_at":"2021-07-31T05:01:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Open Buildings\r\n- **Description:** A dataset of building footprints to support social good applications.\r\n\r\n Building footprints are useful for a range of important applications, from population estimation, urban planning and humanitarian response, to environmental and climate science. This large-scale open dataset contains the outlines of buildings derived from high-resolution satellite imagery in order to support these types of uses. 
The project being based in Ghana, the current focus is on the continent of Africa.\r\n\r\n See: \"Mapping Africa's Buildings with Satellite Imagery\" https:\/\/ai.googleblog.com\/2021\/07\/mapping-africas-buildings-with.html\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2107.12283\r\n- **Data:** https:\/\/sites.research.google\/open-buildings\/\r\n- **Motivation:** *what are some good reasons to have this dataset*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nReported by: @osanseviero ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734","id":956844874,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMzc4NjI4","number":2734,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T15:22:51Z","updated_at":"2021-07-30T15:47:58Z","closed_at":"2021-07-30T15:47:58Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2734","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734.patch"},"body":"Update BibTeX entry.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733","id":956725476,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMjc1NDMy","number":2733,"title":"Add missing parquet 
known extension","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T13:01:20Z","updated_at":"2021-07-30T13:24:31Z","closed_at":"2021-07-30T13:24:30Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2733","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733.patch"},"body":"This code was failing because the parquet extension wasn't recognized:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nbase_url = \"https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/wikipedia\/20200501.en\/1.0.0\/\"\r\ndata_files = {\"train\": base_url + \"wikipedia-train.parquet\"}\r\nwiki = load_dataset(\"parquet\", data_files=data_files, split=\"train\", streaming=True)\r\n```\r\n\r\nIt raises\r\n```python\r\nNotImplementedError: Extraction protocol for file at https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/wikipedia\/20200501.en\/1.0.0\/wikipedia-train.parquet is not implemented yet\r\n```\r\n\r\nI added `parquet` to the list of known extensions\r\n\r\nEDIT: added pickle, conllu, xml extensions as well","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732","id":956676360,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMjMzMzQy","number":2732,"title":"Updated TTC4900 
Dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-30T11:52:14Z","updated_at":"2021-07-30T16:00:51Z","closed_at":"2021-07-30T15:58:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2732","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732.patch"},"body":"- The source address of the TTC4900 dataset of [@savasy](https:\/\/github.com\/savasy) has been updated for direct download.\r\n- Updated readme.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731","id":956087452,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5NzQwMjg5","number":2731,"title":"First draft of a method to auto-convert our datasets to TF 
datasets!","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-29T18:10:25Z","updated_at":"2021-08-05T16:50:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2731","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731.patch"},"body":"Oh my **god** do not merge this yet, it's just a draft.\r\n\r\nI've added a method (via a mixin) to the `arrow_dataset.Dataset` class that automatically converts our Dataset classes to TF Dataset classes ready for training. It hopefully has most of the features we want, including streaming from disk (no need to load the whole dataset in memory!), correct shuffling, variable-length batches to reduce compute, and correct support for unusual padding. It achieves that by calling the tokenizer `pad` method in the middle of a TF compute graph via a very hacky call to `tf.py_function`, which is heretical but seems to work.\r\n\r\nA number of issues need to be resolved before it's ready to merge, though:\r\n\r\n1) Is a MixIn the right way to do this? Do other classes besides `arrow_dataset.Dataset` need this method too?\r\n2) Needs an argument to support constant-length batches for TPU training - this is easy to add and I'll do it soon.\r\n3) Needs the user to supply the list of columns to drop from the arrow `Dataset`. 
Is there some automatic way to get the columns we want, or see which columns were added by the tokenizer?\r\n4) Assumes the label column is always present and always called \"label\" - this is probably not great, but I'm not sure what the 'correct' thing to do here is.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2730","id":955987834,"node_id":"MDU6SXNzdWU5NTU5ODc4MzQ=","number":2730,"title":"Update CommonVoice with new release","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-29T15:59:59Z","updated_at":"2021-08-07T16:19:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CommonVoice mid-2021 release\r\n- **Description:** more data in CommonVoice: Languages that have increased the most by percentage are Thai (almost 20x growth, from 12 hours to 250 hours), Luganda (almost 9x growth, from 8 to 80), Esperanto (7x growth, from 100 to 840), and Tamil (almost 8x, from 24 to 220).\r\n- **Paper:** https:\/\/discourse.mozilla.org\/t\/common-voice-2021-mid-year-dataset-release\/83812\r\n- **Data:** https:\/\/commonvoice.mozilla.org\/en\/datasets\r\n- **Motivation:** More data and more varied. 
I think we just need to add configs in the existing dataset script.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729","id":955920489,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5NTk5MjA4","number":2729,"title":"Fix IndexError while loading Arabic Billion Words dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-29T14:47:02Z","updated_at":"2021-07-30T13:03:55Z","closed_at":"2021-07-30T13:03:55Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2729","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729.patch"},"body":"Catch `IndexError` and ignore that record.\r\n\r\nClose #2727.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2728","id":955892970,"node_id":"MDU6SXNzdWU5NTU4OTI5NzA=","number":2728,"title":"Concurrent use of same dataset (already 
downloaded)","user":{"login":"PierreColombo","id":22492839,"node_id":"MDQ6VXNlcjIyNDkyODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22492839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PierreColombo","html_url":"https:\/\/github.com\/PierreColombo","followers_url":"https:\/\/api.github.com\/users\/PierreColombo\/followers","following_url":"https:\/\/api.github.com\/users\/PierreColombo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PierreColombo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PierreColombo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PierreColombo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PierreColombo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PierreColombo\/repos","events_url":"https:\/\/api.github.com\/users\/PierreColombo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PierreColombo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-29T14:18:38Z","updated_at":"2021-08-02T07:25:57Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen launching several jobs at the same time loading the same dataset trigger some errors see (last comments).\r\n\r\n## Steps to reproduce the bug\r\nexport HF_DATASETS_CACHE=\/gpfswork\/rech\/toto\/datasets\r\nfor MODEL in \"bert-base-uncased\" \"roberta-base\" \"distilbert-base-cased\"; do # \"bert-base-uncased\" \"bert-large-cased\" \"roberta-large\" \"albert-base-v1\" \"albert-large-v1\"; do\r\n for TASK_NAME in \"mrpc\" \"rte\" 'imdb' \"paws\" \"mnli\"; do\r\n export OUTPUT_DIR=${MODEL}_${TASK_NAME}\r\n sbatch --job-name=${OUTPUT_DIR} \\\r\n --gres=gpu:1 \\\r\n --no-requeue \\\r\n --cpus-per-task=10 \\\r\n --hint=nomultithread \\\r\n --time=1:00:00 \\\r\n --output=jobinfo\/${OUTPUT_DIR}_%j.out \\\r\n --error=jobinfo\/${OUTPUT_DIR}_%j.err \\\r\n --qos=qos_gpu-t4 \\\r\n --wrap=\"module purge; module load pytorch-gpu\/py3\/1.7.0 ; export HF_DATASETS_OFFLINE=1; export HF_DATASETS_CACHE=\/gpfswork\/rech\/toto\/datasets; python compute_measures.py --seed=$SEED --saving_path=results --batch_size=$BATCH_SIZE --task_name=$TASK_NAME --model_name=\/gpfswork\/rech\/toto\/transformers_models\/$MODEL\"\r\n\r\n done\r\ndone\r\n\r\n\r\n\r\n```python\r\n# Sample code to reproduce the bug\r\n dataset_train = load_dataset('imdb', split='train', download_mode=\"reuse_cache_if_exists\")\r\n dataset_train = dataset_train.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True).select(list(range(args.filter)))\r\n\r\n dataset_val = load_dataset('imdb', split='train', download_mode=\"reuse_cache_if_exists\")\r\n dataset_val = dataset_val.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True).select(list(range(args.filter, args.filter + 5000)))\r\n\r\n dataset_test = load_dataset('imdb', split='test', download_mode=\"reuse_cache_if_exists\")\r\n dataset_test = dataset_test.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True)\r\n```\r\n\r\n## Expected 
results\r\nI believe I am doing something wrong with the objects. \r\n\r\n## Actual results\r\nTraceback (most recent call last):\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 983, in _prepare_split\r\n check_duplicates=True,\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 192, in __init__\r\n self.stream = pa.OSFile(self._path, \"wb\")\r\n File \"pyarrow\/io.pxi\", line 829, in pyarrow.lib.OSFile.__cinit__\r\n File \"pyarrow\/io.pxi\", line 844, in pyarrow.lib.OSFile._open_writable\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 97, in pyarrow.lib.check_status\r\nFileNotFoundError: [Errno 2] Failed to open local file '\/gpfswork\/rech\/tts\/unm25jp\/datasets\/paws\/labeled_final\/1.1.0\/09d8fae989bb569009a8f5b879ccf2924d3e5cd55bfe2e89e6dab1c0b50ecd34.incomplete\/paws-test.arrow'. Detail: [errno 2] No such file or directory\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"compute_measures.py\", line 181, in \r\n train_loader, val_loader, test_loader = get_dataloader(args)\r\n File \"\/gpfsdswork\/projects\/rech\/toto\/intRAOcular\/dataset_utils.py\", line 69, in get_dataloader\r\n dataset_train = load_dataset('paws', \"labeled_final\", split='train', download_mode=\"reuse_cache_if_exists\")\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 658, in _download_and_prepare\r\n + str(e)\r\nOSError: Cannot find data file.\r\nOriginal error:\r\n[Errno 2] Failed to open local file '\/gpfswork\/rech\/toto\/datasets\/paws\/labeled_final\/1.1.0\/09d8fae989bb569009a8f5b879ccf2924d3e5cd55bfe2e89e6dab1c0b50ecd34.incomplete\/paws-test.arrow'. 
Detail: [errno 2] No such file or directory\r\n\r\n## Environment info\r\n\r\n- `datasets` version: datasets==1.8.0\r\n- Platform: linux (jeanzay)\r\n- Python version: pyarrow==2.0.0\r\n- PyArrow version: 3.7.8\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2727","id":955812149,"node_id":"MDU6SXNzdWU5NTU4MTIxNDk=","number":2727,"title":"Error in loading the Arabic Billion Words Corpus","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-29T12:53:09Z","updated_at":"2021-07-30T13:03:55Z","closed_at":"2021-07-30T13:03:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI get `IndexError: list index out of range` when trying to load the `Techreen` and `Almustaqbal` configs of the dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset(\"arabic_billion_words\", \"Techreen\")\r\nload_dataset(\"arabic_billion_words\", \"Almustaqbal\")\r\n```\r\n\r\n## Expected results\r\nThe datasets load succefully.\r\n\r\n## Actual results\r\n```python\r\n_extract_tags(self, sample, tag)\r\n 139 if len(out) > 0:\r\n 140 break\r\n--> 141 return out[0]\r\n 142 \r\n 143 def _clean_text(self, text):\r\n\r\nIndexError: list index out of range\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.2\r\n- Platform: Ubuntu 18.04.5 LTS\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726","id":955674388,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5Mzg5MDk1","number":2726,"title":"Typo fix `tokenize_exemple`","user":{"login":"shabie","id":30535146,"node_id":"MDQ6VXNlcjMwNTM1MTQ2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30535146?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shabie","html_url":"https:\/\/github.com\/shabie","followers_url":"https:\/\/api.github.com\/users\/shabie\/followers","following_url":"https:\/\/api.github.com\/users\/shabie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shabie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shabie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shabie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shabie\/orgs","repos_url":"https:\/\/api.github.com\/users\/shabie\/repos","events_url":"https:\/\/api.github.com\/users\/shabie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shabie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-29T10:03:37Z","updated_at":"2021-07-29T12:00:25Z","closed_at":"2021-07-29T12:00:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2726","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726.patch"},"body":"There is a small typo in the main README.md","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725","id":955020776,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4ODMwNjYw","number":2725,"title":"Pass use_auth_token to 
request_etags","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-28T16:13:29Z","updated_at":"2021-07-28T16:38:02Z","closed_at":"2021-07-28T16:38:02Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2725","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725.patch"},"body":"Fix #2724.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2724","id":954919607,"node_id":"MDU6SXNzdWU5NTQ5MTk2MDc=","number":2724,"title":"404 Error when loading remote data files from private 
repo","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-07-28T14:24:23Z","updated_at":"2021-07-29T04:58:49Z","cl
osed_at":"2021-07-28T16:38:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen loading remote data files from a private repo, a 404 error is raised.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nurl = hf_hub_url(\"lewtun\/asr-preds-test\", \"preds.jsonl\", repo_type=\"dataset\")\r\ndset = load_dataset(\"json\", data_files=url, use_auth_token=True)\r\n# HTTPError: 404 Client Error: Not Found for url: https:\/\/huggingface.co\/datasets\/lewtun\/asr-preds-test\/resolve\/main\/preds.jsonl\r\n```\r\n\r\n## Expected results\r\nLoad dataset.\r\n\r\n## Actual results\r\n404 Error.\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723","id":954864104,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4Njk0NDMw","number":2723,"title":"Fix en subset by modifying dataset_info with correct validation infos","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-28T13:36:19Z","updated_at":"2021-07-28T15:22:23Z","closed_at":"2021-07-28T15:22:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2723","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723.patch"},"body":"- Related to: #2682 \r\n\r\nWe correct the values of `en` subset concerning the expected validation values (both `num_bytes` and `num_examples`.\r\n\r\nInstead of having:\r\n\r\n`{\"name\": \"validation\", \"num_bytes\": 828589180707, \"num_examples\": 364868892, \"dataset_name\": \"c4\"}`\r\n\r\nWe replace with correct values:\r\n\r\n`{\"name\": \"validation\", \"num_bytes\": 825767266, \"num_examples\": 364608, \"dataset_name\": \"c4\"}`\r\n\r\nThere are still issues with validation with other subsets, but I can't download all the files, unzip to check for the correct number of bytes. 
(If you have a fast way to obtain those values for other subsets, I can do this in this PR ... otherwise I can't spend those resources)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2722","id":954446053,"node_id":"MDU6SXNzdWU5NTQ0NDYwNTM=","number":2722,"title":"Missing cache file","user":{"login":"PosoSAgapo","id":33200481,"node_id":"MDQ6VXNlcjMzMjAwNDgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33200481?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PosoSAgapo","html_url":"https:\/\/github.com\/PosoSAgapo","followers_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/followers","following_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/repos","events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-28T03:52:07Z","updated_at":"2021-07-28T09:07:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Strangely missing cache file after I restart my program again.\r\n\r\n`glue_dataset = datasets.load_dataset('glue', 'sst2')`\r\n\r\n`FileNotFoundError: [Errno 2] No such file or directory: \/Users\/chris\/.cache\/huggingface\/datasets\/glue\/sst2\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96d6053ad\/dataset_info.json'`\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721","id":954238230,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4MTY0Njg3","number":2721,"title":"Deal with the bad check in 
test_load.py","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-27T20:23:23Z","updated_at":"2021-07-28T09:58:34Z","closed_at":"2021-07-28T08:53:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2721","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721.patch"},"body":"This PR removes a check that's been added in #2684. My intention with this check was to capture an URL in the error message, but instead, it captures a substring of the previous regex match in the test function. 
Another option would be to replace this check with:\r\n```python\r\nm_paths = re.findall(r\"\\S*_dummy\/_dummy.py\\b\", str(exc_info.value)) # on Linux this will match an URL as well as a local_path due to different os.sep, so take the last element (an URL always comes last in the list)\r\nassert len(m_paths) > 0 and is_remote_url(m_paths[-1]) # is_remote_url comes from datasets.utils.file_utils\r\n```\r\n\r\n@lhoestq Let me know which one of these two approaches (delete or replace) do you prefer?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720","id":954024426,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3OTgxNjMx","number":2720,"title":"fix: \ud83d\udc1b fix two typos","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-27T15:50:17Z","updated_at":"2021-07-27T18:38:17Z","closed_at":"2021-07-27T18:38:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2720","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2719","id":953932416,"node_id":"MDU6SXNzdWU5NTM5MzI0MTY=","number":2719,"title":"Use ETag in streaming mode to detect resource 
updates","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-27T14:17:09Z","updated_at":"2021-07-27T14:17:09Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nI want to cache data I generate from processing a dataset I've loaded in streaming mode, but I've currently no way to know if the remote data has been updated or not, thus I don't know when to invalidate my cache.\r\n\r\n**Describe the solution you'd like**\r\n\r\nTake the ETag of the data files into account and provide it (directly or through a hash) to give a signal that I can invalidate my cache.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nNone\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718","id":953360663,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3NDE0NTQy","number":2718,"title":"Docs 
structure","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T23:15:13Z","updated_at":"2021-08-11T00:17:23Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2718","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718.patch"},"body":"Organize Datasets documentation into four documentation types to improve clarity and discoverability of content.\r\n\r\n- [x] Tutorials\r\n- [x] How-to guides\r\n- [ ] Conceptual guides (mostly done, need Quentin's help with a few sections)\r\n- [x] Reference\r\n- [x] Review","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717","id":952979976,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3MDkzNDEx","number":2717,"title":"Fix shuffle on IterableDataset that disables batching in case any functions were 
mapped","user":{"login":"amankhandelia","id":7098967,"node_id":"MDQ6VXNlcjcwOTg5Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7098967?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amankhandelia","html_url":"https:\/\/github.com\/amankhandelia","followers_url":"https:\/\/api.github.com\/users\/amankhandelia\/followers","following_url":"https:\/\/api.github.com\/users\/amankhandelia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amankhandelia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amankhandelia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amankhandelia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amankhandelia\/orgs","repos_url":"https:\/\/api.github.com\/users\/amankhandelia\/repos","events_url":"https:\/\/api.github.com\/users\/amankhandelia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amankhandelia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T14:42:22Z","updated_at":"2021-07-26T18:04:14Z","closed_at":"2021-07-26T16:30:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2717","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717.patch"},"body":"Made a very minor change to fix the issue#2716. Added the missing argument in the constructor call.\r\n\r\nAs discussed in the bug report, the change is made to prevent the `shuffle` method call from resetting the value of `batched` attribute in `MappedExamplesIterable`\r\n\r\nFix #2716.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2716","id":952902778,"node_id":"MDU6SXNzdWU5NTI5MDI3Nzg=","number":2716,"title":"Calling shuffle on IterableDataset will disable batching in case any functions were 
mapped","user":{"login":"amankhandelia","id":7098967,"node_id":"MDQ6VXNlcjcwOTg5Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7098967?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amankhandelia","html_url":"https:\/\/github.com\/amankhandelia","followers_url":"https:\/\/api.github.com\/users\/amankhandelia\/followers","following_url":"https:\/\/api.github.com\/users\/amankhandelia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amankhandelia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amankhandelia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amankhandelia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amankhandelia\/orgs","repos_url":"https:\/\/api.github.com\/users\/amankhandelia\/repos","events_url":"https:\/\/api.github.com\/users\/amankhandelia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amankhandelia\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-26T13:24:59Z","updated_at":"2021-07-26T18:04:43Z","closed_at":"2021-07-26T18:04:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"When using dataset in streaming mode, if one applies `shuffle` method on the dataset and `map` method for which `batched=True` than the batching operation will not happen, instead `batched` will be set to `False`\r\n\r\nI did RCA on the dataset codebase, the problem is emerging from [this line of code](https:\/\/github.com\/huggingface\/datasets\/blob\/d25a0bf94d9f9a9aa6cabdf5b450b9c327d19729\/src\/datasets\/iterable_dataset.py#L197) here as it is\r\n`self.ex_iterable.shuffle_data_sources(seed), function=self.function, batch_size=self.batch_size`, as one can see it is missing batched argument, which means that the iterator fallsback to default constructor value, which in this case is `False`.\r\nTo remedy the problem we can change this line to\r\n`self.ex_iterable.shuffle_data_sources(seed), function=self.function, batched=self.batched, batch_size=self.batch_size`\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715","id":952845229,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2OTc5MjQ1","number":2715,"title":"Update PAN-X data URL in XTREME 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-26T12:21:17Z","updated_at":"2021-07-26T13:27:59Z","closed_at":"2021-07-26T13:27:59Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2715","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715.patch"},"body":"Related to #2710, #2691.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2714","id":952580820,"node_id":"MDU6SXNzdWU5NTI1ODA4MjA=","number":2714,"title":"add more precise information for size","user":{"login":"pennyl67","id":1493902,"node_id":"MDQ6VXNlcjE0OTM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1493902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pennyl67","html_url":"https:\/\/github.com\/pennyl67","followers_url":"https:\/\/api.github.com\/users\/pennyl67\/followers","following_url":"https:\/\/api.github.com\/users\/pennyl67\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pennyl67\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pennyl67\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pennyl67\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pennyl67\/orgs","repos_url":"https:\/\/api.github.com\/users\/pennyl67\/repos","events_url":"https:\/\/api.github.com\/users\/pennyl67\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pennyl67\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-26T07:11:03Z","updated_at":"2021-07-26T09:16:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"For the import into ELG, we would like a more precise description of the size of the dataset, instead of the current size categories. The size can be expressed in bytes, or any other preferred size unit. As suggested in the slack channel, perhaps this could be computed with a regex for existing datasets.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713","id":952515256,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2Njk3MzU0","number":2713,"title":"Enumerate all ner_tags values in WNUT 17 dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T05:22:16Z","updated_at":"2021-07-26T09:30:55Z","closed_at":"2021-07-26T09:30:55Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2713","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713.patch"},"body":"This PR does:\r\n- Enumerate all ner_tags in dataset card Data Fields section\r\n- Add all metadata tags to dataset card\r\n\r\nClose #2709.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710","id":951723326,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2MDYyNjAy","number":2710,"title":"Update WikiANN data URL","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-23T16:29:21Z","updated_at":"2021-07-26T09:34:23Z","closed_at":"2021-07-26T09:34:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2710","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710.patch"},"body":"WikiANN data source URL is no longer accessible: 404 error from Dropbox.\r\n\r\nWe have decided to host it at Hugging Face. 
This PR updates the data source URL, the metadata JSON file and the dataset card.\r\n\r\nClose #2691.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2709","id":951534757,"node_id":"MDU6SXNzdWU5NTE1MzQ3NTc=","number":2709,"title":"Missing documentation for wnut_17 (ner_tags)","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-23T12:25:32Z","updated_at":"2021-07-26T09:30:55Z","closed_at":"2021-07-26T09:30:55Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"On the info page of the wnut_17 data set (https:\/\/huggingface.co\/datasets\/wnut_17), the model output of ner-tags is only documented for these 5 cases:\r\n\r\n`ner_tags: a list of classification labels, with possible values including O (0), B-corporation (1), I-corporation (2), B-creative-work (3), I-creative-work (4).`\r\n\r\nI trained a model with the data and it gives me 13 classes:\r\n\r\n```\r\n\"id2label\": {\r\n \"0\": 0,\r\n \"1\": 1,\r\n \"2\": 2,\r\n \"3\": 3,\r\n \"4\": 4,\r\n \"5\": 5,\r\n \"6\": 6,\r\n \"7\": 7,\r\n \"8\": 8,\r\n \"9\": 9,\r\n \"10\": 10,\r\n \"11\": 11,\r\n \"12\": 12\r\n }\r\n\r\n \"label2id\": {\r\n \"0\": 0,\r\n \"1\": 1,\r\n \"10\": 10,\r\n \"11\": 11,\r\n \"12\": 12,\r\n \"2\": 2,\r\n \"3\": 3,\r\n \"4\": 4,\r\n \"5\": 5,\r\n \"6\": 6,\r\n \"7\": 7,\r\n \"8\": 8,\r\n \"9\": 9\r\n }\r\n```\r\nThe paper (https:\/\/www.aclweb.org\/anthology\/W17-4418.pdf) explains those 6 categories, but the ordering does not match:\r\n\r\n```\r\n1. person\r\n2. location (including GPE, facility)\r\n3. corporation\r\n4. product (tangible goods, or well-defined\r\nservices)\r\n5. 
creative-work (song, movie, book and\r\nso on)\r\n6. group (subsuming music band, sports team,\r\nand non-corporate organisations)\r\n```\r\nI would be very helpful for me, if somebody could clarify the model ouputs and explain the \"B-\" and \"I-\" prefixes to me.\r\n\r\nReally great work with that and the other packages, I couldn't believe that training the model with that data was basically a one-liner!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2708","id":951092660,"node_id":"MDU6SXNzdWU5NTEwOTI2NjA=","number":2708,"title":"QASC: incomplete training set ","user":{"login":"danyaljj","id":2441454,"node_id":"MDQ6VXNlcjI0NDE0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2441454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danyaljj","html_url":"https:\/\/github.com\/danyaljj","followers_url":"https:\/\/api.github.com\/users\/danyaljj\/followers","following_url":"https:\/\/api.github.com\/users\/danyaljj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danyaljj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danyaljj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danyaljj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danyaljj\/orgs","repos_url":"https:\/\/api.github.com\/users\/danyaljj\/repos","events_url":"https:\/\/api.github.com\/users\/danyaljj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danyaljj\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-22T21:59:44Z","updated_at":"2021-07-23T13:30:07Z","closed_at":"2021-07-23T13:30:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe training instances are not loaded properly. \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"qasc\", script_version='1.10.2')\r\n \r\ndef load_instances(split): \r\n instances = dataset[split]\r\n print(f\"split: {split} - size: {len(instances)}\")\r\n for x in instances:\r\n print(json.dumps(x))\r\n\r\n\r\nload_instances('test')\r\nload_instances('validation')\r\nload_instances('train')\r\n```\r\n\r\n## results\r\nFor test and validation, we can see the examples in the output (which is good!): \r\n```\r\nsplit: test - size: 920\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Anthax\", \"under water\", \"uterus\", \"wombs\", \"two\", \"moles\", \"live\", \"embryo\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"What type of birth do therian mammals have? 
(A) Anthax (B) under water (C) uterus (D) wombs (E) two (F) moles (G) live (H) embryo\", \"id\": \"3C44YUNSI1OBFBB8D36GODNOZN9DPA\", \"question\": \"What type of birth do therian mammals have?\"}\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Corvidae\", \"arthropods\", \"birds\", \"backbones\", \"keratin\", \"Jurassic\", \"front paws\", \"Parakeets.\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"By what time had mouse-sized viviparous mammals evolved? (A) Corvidae (B) arthropods (C) birds (D) backbones (E) keratin (F) Jurassic (G) front paws (H) Parakeets.\", \"id\": \"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ\", \"question\": \"By what time had mouse-sized viviparous mammals evolved?\"}\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Reduced friction\", \"causes infection\", \"vital to a good life\", \"prevents water loss\", \"camouflage from consumers\", \"Protection against predators\", \"spur the growth of the plant\", \"a smooth surface\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"What does a plant's skin do? (A) Reduced friction (B) causes infection (C) vital to a good life (D) prevents water loss (E) camouflage from consumers (F) Protection against predators (G) spur the growth of the plant (H) a smooth surface\", \"id\": \"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9\", \"question\": \"What does a plant's skin do?\"}\r\n...\r\n```\r\nHowever, only a few instances are loaded for the training split, which is not correct. \r\n\r\n## Environment info\r\n- `datasets` version: '1.10.2' \r\n- Platform: MaxOS \r\n- Python version:3.7\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2707","id":950812945,"node_id":"MDU6SXNzdWU5NTA4MTI5NDU=","number":2707,"title":"404 Not Found Error when loading LAMA 
dataset","user":{"login":"dwil2444","id":26467159,"node_id":"MDQ6VXNlcjI2NDY3MTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26467159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dwil2444","html_url":"https:\/\/github.com\/dwil2444","followers_url":"https:\/\/api.github.com\/users\/dwil2444\/followers","following_url":"https:\/\/api.github.com\/users\/dwil2444\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dwil2444\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dwil2444\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dwil2444\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dwil2444\/orgs","repos_url":"https:\/\/api.github.com\/users\/dwil2444\/repos","events_url":"https:\/\/api.github.com\/users\/dwil2444\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dwil2444\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-22T15:52:33Z","updated_at":"2021-07-26T14:29:07Z","closed_at":"2021-07-26T14:29:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"The [LAMA](https:\/\/huggingface.co\/datasets\/viewer\/?dataset=lama) probing dataset is not available for download: \r\n\r\nSteps to Reproduce: \r\n\r\n1. `from datasets import load_dataset`\r\n2. `dataset = load_dataset('lama', 'trex')`. \r\n\r\n\r\nResults: \r\n`FileNotFoundError: Couldn't find file locally at lama\/lama.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/lama\/lama.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/lama\/lama.py`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706","id":950606561,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk1MTI3ODgz","number":2706,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T12:29:29Z","updated_at":"2021-07-22T12:43:00Z","closed_at":"2021-07-22T12:43:00Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2706","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706.patch"},"body":"Update BibTeX entry.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2705","id":950488583,"node_id":"MDU6SXNzdWU5NTA0ODg1ODM=","number":2705,"title":"404 not found error on loading WIKIANN dataset","user":{"login":"ronbutan","id":39296659,"node_id":"MDQ6VXNlcjM5Mjk2NjU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39296659?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ronbutan","html_url":"https:\/\/github.com\/ronbutan","followers_url":"https:\/\/api.github.com\/users\/ronbutan\/followers","following_url":"https:\/\/api.github.com\/users\/ronbutan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ronbutan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ronbutan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ronbutan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ronbutan\/orgs","repos_url":"https:\/\/api.github.com\/users\/ronbutan\/repos","events_url":"https:\/\/api.github.com\/users\/ronbutan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ronbutan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T09:55:50Z","updated_at":"2021-07-23T08:07:32Z","closed_at":"2021-07-23T08:07:32Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nUnable to retreive wikiann English dataset\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import list_datasets, load_dataset, list_metrics, load_metric\r\nWIKIANN = load_dataset(\"wikiann\",\"en\")\r\n```\r\n\r\n## Expected results\r\nColab notebook should display successful download status\r\n\r\n## Actual results\r\nFileNotFoundError: Couldn't find file at https:\/\/www.dropbox.com\/s\/12h3qqog6q4bjve\/panx_dataset.tar?dl=1\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704","id":950483980,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk1MDIzMTEz","number":2704,"title":"Fix pick default config name message","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T09:49:43Z","updated_at":"2021-07-22T10:02:41Z","closed_at":"2021-07-22T10:02:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2704","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704.patch"},"body":"The error message to tell which config name to load is not displayed. \r\n\r\nThis is because in the code it was considering the config kwargs to be non-empty, which is a special case for custom configs created on the fly. 
It appears after this change: https:\/\/github.com\/huggingface\/datasets\/pull\/2659\r\n\r\nI fixed that by making the config kwargs empty by default, even if default parameters are passed\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2703","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2703","id":950482284,"node_id":"MDU6SXNzdWU5NTA0ODIyODQ=","number":2703,"title":"Bad message when config name is missing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizat
ions_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-22T09:47:23Z","updated_at":"2021-07-22T10:02:40Z","closed_at":"2021-07-22T10:02:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"When loading a dataset that have several configurations, we expect to see an error message if the user doesn't specify a config name.\r\n\r\nHowever in `datasets` 1.10.0 and 1.10.1 it doesn't show the right message:\r\n\r\n```python\r\nimport datasets\r\n\r\ndatasets.load_dataset(\"glue\")\r\n```\r\nraises\r\n```python\r\nAttributeError: 'BuilderConfig' object has no attribute 'text_features'\r\n```\r\ninstead of\r\n```python\r\nValueError: Config name is missing.\r\nPlease pick one among the available configs: ['cola', 'sst2', 'mrpc', 'qqp', 'stsb', 'mnli', 'mnli_mismatched', 'mnli_matched', 'qnli', 'rte', 'wnli', 'ax']\r\nExample of usage:\r\n `load_dataset('glue', 'cola')`\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702","id":950448159,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0OTkyOTc1","number":2702,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T09:04:39Z","updated_at":"2021-07-22T09:17:39Z","closed_at":"2021-07-22T09:17:38Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2702","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702.patch"},"body":"Update BibTeX 
entry.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701","id":950422403,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0OTcxMzM3","number":2701,"title":"Fix download_mode docstrings","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T08:30:25Z","updated_at":"2021-07-22T09:33:31Z","closed_at":"2021-07-22T09:33:31Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2701","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701.patch"},"body":"Fix `download_mode` docstrings.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2700","id":950276325,"node_id":"MDU6SXNzdWU5NTAyNzYzMjU=","number":2700,"title":"from datasets import Dataset is failing 
","user":{"login":"kswamy15","id":5582286,"node_id":"MDQ6VXNlcjU1ODIyODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5582286?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kswamy15","html_url":"https:\/\/github.com\/kswamy15","followers_url":"https:\/\/api.github.com\/users\/kswamy15\/followers","following_url":"https:\/\/api.github.com\/users\/kswamy15\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kswamy15\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kswamy15\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kswamy15\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kswamy15\/orgs","repos_url":"https:\/\/api.github.com\/users\/kswamy15\/repos","events_url":"https:\/\/api.github.com\/users\/kswamy15\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kswamy15\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T03:51:23Z","updated_at":"2021-07-22T07:23:45Z","closed_at":"2021-07-22T07:09:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import Dataset\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in ()\r\n 25 import posixpath\r\n 26 import requests\r\n---> 27 from tqdm.contrib.concurrent import thread_map\r\n 28 \r\n 29 from .. 
import __version__, config, utils\r\n\r\nModuleNotFoundError: No module named 'tqdm.contrib.concurrent'\r\n\r\n---------------------------------------------------------------------------\r\nNOTE: If your import is failing due to a missing package, you can\r\nmanually install dependencies using either !pip or !apt.\r\n\r\nTo view examples of installing some common dependencies, click the\r\n\"Open Examples\" button below.\r\n---------------------------------------------------------------------------\r\n\r\n## Environment info\r\n\r\n- `datasets` version: latest version as of 07\/21\/2021\r\n- Platform: Google Colab\r\n- Python version: 3.7\r\n- PyArrow version:\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2699","id":950221226,"node_id":"MDU6SXNzdWU5NTAyMjEyMjY=","number":2699,"title":"cannot combine splits merging and streaming?","user":{"login":"eyaler","id":4436747,"node_id":"MDQ6VXNlcjQ0MzY3NDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4436747?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eyaler","html_url":"https:\/\/github.com\/eyaler","followers_url":"https:\/\/api.github.com\/users\/eyaler\/followers","following_url":"https:\/\/api.github.com\/users\/eyaler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eyaler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eyaler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eyaler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eyaler\/orgs","repos_url":"https:\/\/api.github.com\/users\/eyaler\/repos","events_url":"https:\/\/api.github.com\/users\/eyaler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eyaler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T01:13:25Z","updated_at":"2021-07-22T08:27:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"this does not work:\r\n`dataset = datasets.load_dataset('mc4','iw',split='train+validation',streaming=True)`\r\nwith error:\r\n`ValueError: Bad split: train+validation. Available splits: ['train', 'validation']`\r\n\r\nthese work:\r\n`dataset = datasets.load_dataset('mc4','iw',split='train+validation')`\r\n`dataset = datasets.load_dataset('mc4','iw',split='train',streaming=True)`\r\n`dataset = datasets.load_dataset('mc4','iw',split='validation',streaming=True)`\r\n\r\ni could not find a reference to this in the documentation and the error message is confusing. 
also would be nice to allow streaming for the merged splits","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698","id":950159867,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NzUxMzMw","number":2698,"title":"Ignore empty batch when writing","user":{"login":"pcuenca","id":1177582,"node_id":"MDQ6VXNlcjExNzc1ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1177582?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pcuenca","html_url":"https:\/\/github.com\/pcuenca","followers_url":"https:\/\/api.github.com\/users\/pcuenca\/followers","following_url":"https:\/\/api.github.com\/users\/pcuenca\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pcuenca\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pcuenca\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pcuenca\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pcuenca\/orgs","repos_url":"https:\/\/api.github.com\/users\/pcuenca\/repos","events_url":"https:\/\/api.github.com\/users\/pcuenca\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pcuenca\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T22:35:30Z","updated_at":"2021-07-26T14:56:03Z","closed_at":"2021-07-26T13:25:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2698","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698.patch"},"body":"This prevents an schema update with unknown column types, as reported in #2644.\r\n\r\nThis is my first attempt at fixing the issue. I tested the following:\r\n- First batch returned by a batched map operation is empty.\r\n- An intermediate batch is empty.\r\n- `python -m unittest tests.test_arrow_writer` passes.\r\n\r\nHowever, `arrow_writer` looks like a pretty generic interface, I'm not sure if there are other uses I may have overlooked. 
Let me know if that's the case, or if a better approach would be preferable.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697","id":950021623,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NjMyODg0","number":2697,"title":"Fix import on Colab","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T19:03:38Z","updated_at":"2021-07-22T07:09:08Z","closed_at":"2021-07-22T07:09:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2697","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697.patch"},"body":"Fix #2695, fix #2700. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696","id":949901726,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NTMwODg3","number":2696,"title":"Add support for disable_progress_bar on Windows","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T16:34:53Z","updated_at":"2021-07-26T13:31:14Z","closed_at":"2021-07-26T09:38:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2696","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696.patch"},"body":"This PR is a continuation of #2667 and adds support for `utils.disable_progress_bar()` on Windows when using multiprocessing. 
This [answer](https:\/\/stackoverflow.com\/a\/6596695\/14095927) on SO explains it nicely why the current approach (with calling `utils.is_progress_bar_enabled()` inside `Dataset._map_single`) would not work on Windows.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2695","id":949864823,"node_id":"MDU6SXNzdWU5NDk4NjQ4MjM=","number":2695,"title":"Cannot import load_dataset on Colab","user":{"login":"bayartsogt-ya","id":43239645,"node_id":"MDQ6VXNlcjQzMjM5NjQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43239645?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bayartsogt-ya","html_url":"https:\/\/github.com\/bayartsogt-ya","followers_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/followers","following_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/orgs","repos_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/repos","events_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-21T15:52:51Z","updated_at":"2021-07-22T07:26:25Z","closed_at":"2021-07-22T07:09:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nGot tqdm concurrent module not found error during importing load_dataset from datasets.\r\n\r\n## Steps to reproduce the bug\r\nHere [colab notebook](https:\/\/colab.research.google.com\/drive\/1pErWWnVP4P4mVHjSFUtkePd8Na_Qirg4?usp=sharing) to reproduce the error\r\n\r\nOn colab:\r\n```python\r\n!pip install datasets\r\nfrom datasets import load_dataset\r\n```\r\n\r\n## Expected results\r\nWorks without error\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n```\r\nModuleNotFoundError Traceback (most recent call last)\r\n in ()\r\n----> 1 from datasets import load_dataset, load_metric, Metric, MetricInfo, Features, Value\r\n 2 from sklearn.metrics import mean_squared_error\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/__init__.py in ()\r\n 31 )\r\n 32 \r\n---> 33 from .arrow_dataset import Dataset, concatenate_datasets\r\n 34 from .arrow_reader import ArrowReader, ReadInstruction\r\n 35 from .arrow_writer import ArrowWriter\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in ()\r\n 40 from tqdm.auto import tqdm\r\n 41 
\r\n---> 42 from datasets.tasks.text_classification import TextClassification\r\n 43 \r\n 44 from . import config, utils\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/tasks\/__init__.py in ()\r\n 1 from typing import Optional\r\n 2 \r\n----> 3 from ..utils.logging import get_logger\r\n 4 from .automatic_speech_recognition import AutomaticSpeechRecognition\r\n 5 from .base import TaskTemplate\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/__init__.py in ()\r\n 19 \r\n 20 from . import logging\r\n---> 21 from .download_manager import DownloadManager, GenerateMode\r\n 22 from .file_utils import DownloadConfig, cached_path, hf_bucket_url, is_remote_url, temp_seed\r\n 23 from .mock_download_manager import MockDownloadManager\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/download_manager.py in ()\r\n 24 \r\n 25 from .. import config\r\n---> 26 from .file_utils import (\r\n 27 DownloadConfig,\r\n 28 cached_path,\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in ()\r\n 25 import posixpath\r\n 26 import requests\r\n---> 27 from tqdm.contrib.concurrent import thread_map\r\n 28 \r\n 29 from .. import __version__, config, utils\r\n\r\nModuleNotFoundError: No module named 'tqdm.contrib.concurrent'\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.0\r\n- Platform: Colab\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694","id":949844722,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDg0NTcy","number":2694,"title":"fix: \ud83d\udc1b change string format to allow copy\/paste to work in 
bash","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T15:30:40Z","updated_at":"2021-07-22T10:41:47Z","closed_at":"2021-07-22T10:41:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2694","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694.patch"},"body":"Before: copy\/paste resulted in an error because the square bracket\r\ncharacters `[]` are special characters in bash","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693","id":949797014,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDQ1ODAz","number":2693,"title":"Fix OSCAR 
Esperanto","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T14:43:50Z","updated_at":"2021-07-21T14:53:52Z","closed_at":"2021-07-21T14:53:51Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2693","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693.patch"},"body":"The Esperanto part (original) of OSCAR has the wrong number of examples:\r\n```python\r\nfrom datasets import load_dataset\r\nraw_datasets = load_dataset(\"oscar\", \"unshuffled_original_eo\")\r\n```\r\nraises\r\n```python\r\nNonMatchingSplitsSizesError:\r\n[{'expected': SplitInfo(name='train', num_bytes=314188336, num_examples=121171, dataset_name='oscar'),\r\n'recorded': SplitInfo(name='train', num_bytes=314064514, num_examples=121168, dataset_name='oscar')}]\r\n```\r\n\r\nI updated the number of expected examples in dataset_infos.json\r\n\r\ncc @sgugger ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692","id":949765484,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDE4MDg1","number":2692,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T14:23:35Z","updated_at":"2021-07-21T15:31:41Z","closed_at":"2021-07-21T15:31:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2692","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692.patch"},"body":"Update BibTeX entry","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2691","id":949758379,"node_id":"MDU6SXNzdWU5NDk3NTgzNzk=","number":2691,"title":"xtreme \/ pan-x cannot be downloaded","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-21T14:18:05Z","updated_at":"2021-07-26T09:34:22Z","closed_at":"2021-07-26T09:34:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nDataset xtreme \/ pan-x cannot be loaded\r\n\r\nSeems related to https:\/\/github.com\/huggingface\/datasets\/pull\/2326\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\ndataset = load_dataset(\"xtreme\", \"PAN-X.fr\")\r\n```\r\n\r\n## Expected results\r\n\r\nLoad the dataset\r\n\r\n## Actual results\r\n\r\n```\r\nFileNotFoundError: Couldn't find file at https:\/\/www.dropbox.com\/s\/12h3qqog6q4bjve\/panx_dataset.tar?dl=1\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: macOS-11.4-x86_64-i386-64bit\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690","id":949574500,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0MjU5MDc1","number":2690,"title":"Docs details","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T10:43:14Z","updated_at":"2021-07-27T18:40:54Z","closed_at":"2021-07-27T18:40:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2690","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690.patch"},"body":"Some comments here:\r\n\r\n- the code samples assume the expected libraries have already been installed. Maybe add a section at start, or add it to every code sample. 
Something like `pip install datasets transformers torch 'datasets[streaming]'` (maybe just link to https:\/\/huggingface.co\/docs\/datasets\/installation.html + a one-liner that installs all the requirements \/ alternatively a requirements.txt file)\r\n- \"If you\u2019d like to play with the examples, you must install it from source.\" in https:\/\/huggingface.co\/docs\/datasets\/installation.html: it's not clear to me what this means (what are these \"examples\"?)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html: \"or AWS bucket if it\u2019s not already stored in the library\". It's the only place in the doc (aside from the docstring https:\/\/huggingface.co\/docs\/datasets\/package_reference\/loading_methods.html?highlight=aws bucket#datasets.list_datasets) where the \"AWS bucket\" is mentioned. It's not easy to understand what this means. Maybe explain more, and link to https:\/\/s3.amazonaws.com\/datasets.huggingface.co and\/or https:\/\/huggingface.co\/docs\/datasets\/filesystems.html.\r\n- example in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#manually-downloading-files is obsoleted by https:\/\/github.com\/huggingface\/datasets\/pull\/2326. Also: see https:\/\/github.com\/huggingface\/datasets\/issues\/2691 for a bug on this specific dataset.\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#manually-downloading-files the doc says \"After you\u2019ve downloaded the files, you can point to the folder hosting them locally with the data_dir argument as follows:\", but the following example does not show how to use `data_dir`\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#csv-files, it would be nice to have an URL to the csv loader reference (but I'm not sure there is one in the API reference). This comment applies in many places in the doc: I would want the API reference to contain doc for all the code\/functions\/classes... and I would want a lot more links inside the doc pointing to the API entries.\r\n- in the API reference (docstrings) I would prefer \"SOURCE\" to link to github instead of a copy of the code inside the docs site (eg. https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/load.py#L711 instead of https:\/\/huggingface.co\/docs\/datasets\/_modules\/datasets\/load.html#load_dataset)\r\n- it seems like not all the API is exposed in the doc. For example, there is no doc for [`disable_progress_bar`](https:\/\/github.com\/huggingface\/datasets\/search?q=disable_progress_bar), see https:\/\/huggingface.co\/docs\/datasets\/search.html?q=disable_progress_bar, even if the code contains docstrings. Does it mean that the function is not officially supported? (otherwise, maybe it also deserves a mention in https:\/\/huggingface.co\/docs\/datasets\/package_reference\/logging_methods.html)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html?highlight=most%20efficient%20format%20have%20json%20files%20consisting%20multiple%20json%20objects#json-files, \"The most efficient format is to have JSON files consisting of multiple JSON objects, one per line, representing individual data rows:\", maybe link to https:\/\/en.wikipedia.org\/wiki\/JSON_streaming#Line-delimited_JSON and give it a name (\"line-delimited JSON\"? 
\"JSON Lines\" as in https:\/\/huggingface.co\/docs\/datasets\/processing.html#exporting-a-dataset-to-csv-json-parquet-or-to-python-objects ?)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html, for the local files sections, it would be nice to provide sample csv \/ json \/ text files to download, so that it's easier for the reader to try to load them (instead: they won't try)\r\n- the doc explains how to shard a dataset, but does not explain why and when a dataset should be sharded (I have no idea... for [parallelizing](https:\/\/huggingface.co\/docs\/datasets\/processing.html#multiprocessing)?). It does neither give an idea of the number of shards a dataset typically should have and why.\r\n- the code example in https:\/\/huggingface.co\/docs\/datasets\/processing.html#mapping-in-a-distributed-setting does not work, because `training_args` has not been defined before in the doc.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2689","id":949447104,"node_id":"MDU6SXNzdWU5NDk0NDcxMDQ=","number":2689,"title":"cannot save the dataset to disk after rename_column","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-21T08:13:40Z","updated_at":"2021-07-21T13:11:04Z","closed_at":"2021-07-21T13:11:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nIf you use `rename_column` and do no other modification, you will be unable to save the dataset using `save_to_disk`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nIn [1]: from datasets import Dataset, load_from_disk\r\nIn [5]: dataset=Dataset.from_dict({'foo': [0]})\r\nIn [7]: dataset.save_to_disk('foo')\r\nIn [8]: dataset=load_from_disk('foo')\r\nIn [10]: 
dataset=dataset.rename_column('foo', 'bar')\r\nIn [11]: dataset.save_to_disk('foo')\r\n---------------------------------------------------------------------------\r\nPermissionError Traceback (most recent call last)\r\n in \r\n----> 1 dataset.save_to_disk('foo')\r\n\r\n\/mnt\/beegfs\/projects\/meerqat\/anaconda3\/envs\/meerqat\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in save_to_disk(self, dataset_path\r\n, fs)\r\n 597 if Path(dataset_path, config.DATASET_ARROW_FILENAME) in cache_files_paths:\r\n 598 raise PermissionError(\r\n--> 599 f\"Tried to overwrite {Path(dataset_path, config.DATASET_ARROW_FILENAME)} but a dataset can't overwrite itself.\"\r\n 600 )\r\n 601 if Path(dataset_path, config.DATASET_INDICES_FILENAME) in cache_files_paths:\r\n\r\nPermissionError: Tried to overwrite foo\/dataset.arrow but a dataset can't overwrite itself.\r\n```\r\n\r\nN. B. I created the dataset from dict to enable easy reproduction but the same happens if you load an existing dataset (e.g. starting from `In [8]`)\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-3.10.0-1160.11.1.el7.x86_64-x86_64-with-centos-7.9.2009-Core\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2688","id":949182074,"node_id":"MDU6SXNzdWU5NDkxODIwNzQ=","number":2688,"title":"hebrew language codes he and iw should be treated as aliases","user":{"login":"eyaler","id":4436747,"node_id":"MDQ6VXNlcjQ0MzY3NDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4436747?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eyaler","html_url":"https:\/\/github.com\/eyaler","followers_url":"https:\/\/api.github.com\/users\/eyaler\/followers","following_url":"https:\/\/api.github.com\/users\/eyaler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eyaler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eyaler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eyaler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eyaler\/orgs","repos_url":"https:\/\/api.github.com\/users\/eyaler\/repos","events_url":"https:\/\/api.github.com\/users\/eyaler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eyaler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-20T23:13:52Z","updated_at":"2021-07-21T16:34:53Z","closed_at":"2021-07-21T16:34:53Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"https:\/\/huggingface.co\/datasets\/mc4 not listed when searching for hebrew datasets (he) as it uses the older language code iw, preventing discoverability. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687","id":948890481,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNjY1NDI2","number":2687,"title":"Minor documentation fix","user":{"login":"slowwavesleep","id":44175589,"node_id":"MDQ6VXNlcjQ0MTc1NTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44175589?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slowwavesleep","html_url":"https:\/\/github.com\/slowwavesleep","followers_url":"https:\/\/api.github.com\/users\/slowwavesleep\/followers","following_url":"https:\/\/api.github.com\/users\/slowwavesleep\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slowwavesleep\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slowwavesleep\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slowwavesleep\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slowwavesleep\/orgs","repos_url":"https:\/\/api.github.com\/users\/slowwavesleep\/repos","events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T17:43:23Z","updated_at":"2021-07-21T13:04:55Z","closed_at":"2021-07-21T13:04:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2687","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687.patch"},"body":"Currently, [Writing a dataset loading script](https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html) page has a small error. A link to `matinf` dataset in [_Dataset scripts of reference_](https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html#dataset-scripts-of-reference) section actually leads to `xsquad`, instead. This PR fixes that. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686","id":948811669,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTk4OTE3","number":2686,"title":"Fix bad config ids that name cache directories","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T16:00:45Z","updated_at":"2021-07-20T16:27:15Z","closed_at":"2021-07-20T16:27:15Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2686","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686.patch"},"body":"`data_dir=None` was considered a dataset config parameter, hence creating a special config_id for all dataset being loaded.\r\nSince the config_id is used to name the cache directories, this leaded to datasets being regenerated for users.\r\n\r\nI fixed this by ignoring the value of `data_dir` when it's `None` when computing the config_id.\r\nI also added a test to make sure the cache directories are not unexpectedly renamed in the future.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2683","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2789\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789","id":967361934,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA5NTQwMzY5","number":2789,"title":"Updated dataset description of 
DaNE","user":{"login":"KennethEnevoldsen","id":23721977,"node_id":"MDQ6VXNlcjIzNzIxOTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23721977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KennethEnevoldsen","html_url":"https:\/\/github.com\/KennethEnevoldsen","followers_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/followers","following_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/orgs","repos_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/repos","events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KennethEnevoldsen\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-11T19:58:48Z","updated_at":"2021-08-11T19:58:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2789","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2789.patch"},"body":null,"performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2788\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2788","id":967149389,"node_id":"MDU6SXNzdWU5NjcxNDkzODk=","number":2788,"title":"How to sample every file in a list of files making up a split in a dataset when 
loading?","user":{"login":"brijow","id":11220949,"node_id":"MDQ6VXNlcjExMjIwOTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11220949?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/brijow","html_url":"https:\/\/github.com\/brijow","followers_url":"https:\/\/api.github.com\/users\/brijow\/followers","following_url":"https:\/\/api.github.com\/users\/brijow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/brijow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/brijow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/brijow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/brijow\/orgs","repos_url":"https:\/\/api.github.com\/users\/brijow\/repos","events_url":"https:\/\/api.github.com\/users\/brijow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/brijow\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-11T17:43:21Z","updated_at":"2021-08-11T17:43:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am loading a dataset with multiple train, test, and validation files like this:\r\n\r\n```\r\ndata_files_dict = {\r\n \"train\": [train_file1, train_file2],\r\n \"test\": [test_file1, test_file2],\r\n \"val\": [val_file1, val_file2]\r\n}\r\ndataset = datasets.load_dataset(\r\n \"csv\",\r\n data_files=data_files_dict,\r\n split=['train[:8]', 'test[:8]', 'val[:8]']\r\n)\r\n\r\n```\r\n\r\nHowever, this only selects the first 8 rows from train_file1, test_file1, val_file1, since they are the first files in the lists.\r\n\r\nI'm trying to formulate a split argument that can sample from each file specified in my list of files that make up each split.\r\n\r\nIs this type of splitting supported? 
If so, how can I do it?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2787\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2787","id":967018406,"node_id":"MDU6SXNzdWU5NjcwMTg0MDY=","number":2787,"title":"ConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com","user":{"login":"jinec","id":39627475,"node_id":"MDQ6VXNlcjM5NjI3NDc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39627475?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jinec","html_url":"https:\/\/github.com\/jinec","followers_url":"https:\/\/api.github.com\/users\/jinec\/followers","following_url":"https:\/\/api.github.com\/users\/jinec\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jinec\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jinec\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jinec\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jinec\/orgs","repos_url":"https:\/\/api.github.com\/users\/jinec\/repos","events_url":"https:\/\/api.github.com\/users\/jinec\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jinec\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-11T16:19:01Z","updated_at":"2021-08-11T17:09:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello,\r\nI am trying to run run_glue.py and it gives me this error -\r\n\r\nTraceback (most recent call last):\r\n File \"E:\/BERT\/pytorch_hugging\/transformers\/examples\/pytorch\/text-classification\/run_glue.py\", line 546, in \r\n main()\r\n File \"E:\/BERT\/pytorch_hugging\/transformers\/examples\/pytorch\/text-classification\/run_glue.py\", line 250, in main\r\n datasets = load_dataset(\"glue\", data_args.task_name, cache_dir=model_args.cache_dir)\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\load.py\", line 718, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\load.py\", line 320, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"C:\\install\\Anaconda3\\envs\\huggingface\\lib\\site-packages\\datasets\\utils\\file_utils.py\", line 623, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.7.0\/datasets\/glue\/glue.py\r\n\r\nTrying to do python run_glue.py 
--model_name_or_path\r\nbert-base-cased\r\n--task_name\r\nmrpc\r\n--do_train\r\n--do_eval\r\n--max_seq_length\r\n128\r\n--per_device_train_batch_size\r\n32\r\n--learning_rate\r\n2e-5\r\n--num_train_epochs\r\n3\r\n--output_dir\r\n.\/tmp\/mrpc\/\r\n\r\nIs this something on my end? From what I can tell, this was re-fixeded by @fullyz a few months ago.\r\nThank you!\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2786\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786","id":966282934,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA4NTQwMzU0","number":2786,"title":"Support streaming compressed files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-11T09:02:06Z","updated_at":"2021-08-11T13:11:36Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2786","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2786.patch"},"body":"Add support to stream compressed files (current options in fsspec):\r\n- bz2\r\n- lz4\r\n- xz\r\n- zstd\r\n\r\ncc: @lewtun ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2783\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783","id":965461382,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3NzcxOTM3","number":2783,"title":"Add KS task to 
SUPERB","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-10T22:14:07Z","updated_at":"2021-08-11T20:19:17Z","closed_at":"2021-08-11T20:19:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2783","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2783.patch"},"body":"Add the KS (keyword spotting) task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051).\r\n\r\n- [s3prl instructions](https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/README.md#ks-keyword-spotting)\r\n- [s3prl implementation](https:\/\/github.com\/s3prl\/s3prl\/blob\/master\/s3prl\/downstream\/speech_commands\/dataset.py)\r\n- [TFDS implementation](https:\/\/github.com\/tensorflow\/datasets\/blob\/master\/tensorflow_datasets\/audio\/speech_commands.py)\r\n\r\nSome notable quirks:\r\n- The dataset is originally single-archive (train+val+test all in one), but the test set has a \"canonical\" distribution in a separate archive, which is also used here (see `_split_ks_files()`). \r\n- The `_background_noise_`\/`_silence_` audio files are much longer than others, so they require some sort of slicing for downstream training. 
I decided to leave the implementation of that up to the users, since TFDS and s3prl take different approaches (either slicing wavs deterministically, or subsampling randomly at runtime)\r\n\r\nRelated to #2619.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2782\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782","id":964858439,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MjQ5NDE5","number":2782,"title":"Fix renaming of corpus_bleu args","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T11:02:34Z","updated_at":"2021-08-10T11:16:07Z","closed_at":"2021-08-10T11:16:07Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2782","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2782.patch"},"body":"Last `sacrebleu` release (v2.0.0) has renamed `sacrebleu.corpus_bleu` args from `(sys_stream, ref_streams)` to `(hipotheses, references)`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR passes the args without parameter names, so that it is valid for all versions of `sacrebleu`.\r\n\r\nThis is a partial hotfix of #2781.\r\n\r\nClose #2781.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2781\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2781","id":964805351,"node_id":"MDU6SXNzdWU5NjQ4MDUzNTE=","number":2781,"title":"Latest v2.0.0 release of sacrebleu has broken some metrics","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-08-10T09:59:41Z","updated_at":"2021-08-10T11:16:07Z","closed_at":"2021-08-10T11:16:07Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nAfter `sacrebleu` v2.0.0 release (see changes here: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15), some of `datasets` metrics are broken:\r\n- Default tokenizer `sacrebleu.DEFAULT_TOKENIZER` no longer exists:\r\n - #2739\r\n - #2778\r\n- Bleu tokenizers are no longer accessible with `sacrebleu.TOKENIZERS`:\r\n - #2779\r\n- `corpus_bleu` args have been renamed from `(sys_stream, ref_streams)` to `(hipotheses, references)`: \r\n - #2782 ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2780\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780","id":964794764,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTk2NjA3","number":2780,"title":"VIVOS dataset for Vietnamese ASR","user":{"login":"binh234","id":57580923,"node_id":"MDQ6VXNlcjU3NTgwOTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57580923?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/binh234","html_url":"https:\/\/github.com\/binh234","followers_url":"https:\/\/api.github.com\/users\/binh234\/followers","following_url":"https:\/\/api.github.com\/users\/binh234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/binh234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/binh234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/binh234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/binh234\/orgs","repos_url":"https:\/\/api.github.com\/users\/binh234\/repos","events_url":"https:\/\/api.github.com\/users\/binh234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/binh234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T09:47:36Z","updated_at":"2021-08-11T14:09:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2780","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2780.patch"},"body":null,"performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2779\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779","id":964775085,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTgwNTgw","number":2779,"title":"Fix sacrebleu 
tokenizers","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T09:24:27Z","updated_at":"2021-08-10T11:03:08Z","closed_at":"2021-08-10T10:57:54Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2779","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2779.patch"},"body":"Last `sacrebleu` release (v2.0.0) has removed `sacrebleu.TOKENIZERS`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR makes a hot fix of the bug by using a private function in `sacrebleu`: `sacrebleu.metrics.bleu._get_tokenizer()`.\r\n\r\nEventually, this should be further fixed in order to use only public functions.\r\n\r\nThis is a partial hotfix of #2781.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2778\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778","id":964737422,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTQ5MTk2","number":2778,"title":"Do not pass tokenize to 
sacrebleu","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T08:40:37Z","updated_at":"2021-08-10T10:03:37Z","closed_at":"2021-08-10T10:03:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2778","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2778.patch"},"body":"Last `sacrebleu` release (v2.0.0) has removed `sacrebleu.DEFAULT_TOKENIZER`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR does not pass `tokenize` to `sacrebleu` (note that the user cannot pass it anyway) and `sacrebleu` will use its default, no matter where it is and how it is called.\r\n\r\nRelated to #2739.\r\n\r\nThis is a partial hotfix of #2781.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2777\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777","id":964696380,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA3MTEzNzg3","number":2777,"title":"Use packaging to handle 
versions","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-10T07:51:39Z","updated_at":"2021-08-10T11:32:51Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2777","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2777.patch"},"body":"Use packaging module to handle\/validate\/check versions of Python packages.\r\n\r\nRelated to #2769.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2776\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2776","id":964400596,"node_id":"MDU6SXNzdWU5NjQ0MDA1OTY=","number":2776,"title":"document `config.HF_DATASETS_OFFLINE` and 
precedence","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T21:23:17Z","updated_at":"2021-08-09T21:23:17Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976 implemented `HF_DATASETS_OFFLINE`, but:\r\n1. `config.HF_DATASETS_OFFLINE` is not documented\r\n2. the precedence is not documented (env, config)\r\n\r\nI'm thinking it probably should be similar to what it says https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#from-the-huggingface-hub about `datasets.config.IN_MEMORY_MAX_SIZE`:\r\n\r\nQuote:\r\n> The default in \ud83e\udd17 Datasets is to memory-map the dataset on disk unless you set datasets.config.IN_MEMORY_MAX_SIZE different from 0 bytes (default). In that case, the dataset will be copied in-memory if its size is smaller than datasets.config.IN_MEMORY_MAX_SIZE bytes, and memory-mapped otherwise. 
This behavior can be enabled by setting either the configuration option datasets.config.IN_MEMORY_MAX_SIZE (higher precedence) or the environment variable HF_DATASETS_IN_MEMORY_MAX_SIZE (lower precedence) to nonzero.\r\n\r\nContext: trying to use `config.HF_DATASETS_OFFLINE` here:\r\nhttps:\/\/github.com\/bigscience-workshop\/Megatron-DeepSpeed\/pull\/48\r\nbut are uncertain if it's safe, since it's not documented as a public API.\r\n\r\nThank you!\r\n\r\n@lhoestq, @albertvillanova ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2775\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2775","id":964303626,"node_id":"MDU6SXNzdWU5NjQzMDM2MjY=","number":2775,"title":"`generate_random_fingerprint()` deterministic with \ud83e\udd17Transformers' `set_seed()`","user":{"login":"mbforbes","id":1170062,"node_id":"MDQ6VXNlcjExNzAwNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1170062?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mbforbes","html_url":"https:\/\/github.com\/mbforbes","followers_url":"https:\/\/api.github.com\/users\/mbforbes\/followers","following_url":"https:\/\/api.github.com\/users\/mbforbes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mbforbes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mbforbes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mbforbes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mbforbes\/orgs","repos_url":"https:\/\/api.github.com\/users\/mbforbes\/repos","events_url":"https:\/\/api.github.com\/users\/mbforbes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mbforbes\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-09T19:28:51Z","updated_at":"2021-08-10T17:33:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\n**Update:** I dug into this to try to reproduce the underlying issue, and I believe it's that `set_seed()` from the `transformers` library makes the \"random\" fingerprint identical each time. I believe this is still a bug, because `datasets` is used exactly this way in `transformers` after `set_seed()` has been called, and I think that using `set_seed()` is a standard procedure to aid reproducibility. I've added more details to reproduce this below.\r\n\r\nHi there! I'm using my own local dataset and custom preprocessing function. My preprocessing function seems to be unpickle-able, perhaps because it is from a closure (will debug this separately). 
I get this warning, which is expected:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/450b9174765374111e5c6daab0ed294bc3d9b639\/src\/datasets\/fingerprint.py#L260-L265\r\n\r\nHowever, what's not expected is that the `datasets` actually _does_ seem to cache and reuse this dataset between runs! After that line, the next thing that's logged looks like:\r\n\r\n```text\r\n Loading cached processed dataset at \/home\/xxx\/.cache\/huggingface\/datasets\/csv\/default-xxx\/0.0.0\/xxx\/cache-xxx.arrow\r\n```\r\n\r\nThe path is exactly the same each run (e.g., last 26 runs).\r\n\r\nThis becomes a problem because I'll pass in the `--max_eval_samples` flag to the HuggingFace example script I'm running off of ([run_swag.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/pytorch\/multiple-choice\/run_swag.py)). The fact that the cached dataset is reused means this flag gets ignored. I'll try to load 100 examples, and it will load the full cached 1,000,000.\r\n\r\nI think that\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/450b9174765374111e5c6daab0ed294bc3d9b639\/src\/datasets\/fingerprint.py#L248\r\n\r\n... is actually consistent because randomness is being controlled in HuggingFace\/Transformers for reproducibility. I've added a demo of this below.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\n# Contents of print_fingerprint.py\r\nfrom transformers import set_seed\r\nfrom datasets.fingerprint import generate_random_fingerprint\r\nset_seed(42)\r\nprint(generate_random_fingerprint())\r\n```\r\n\r\n```bash\r\nfor i in {0..10}; do\r\n python print_fingerprint.py\r\ndone\r\n\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n1c80317fa3b1799d\r\n```\r\n\r\n## Expected results\r\nAfter the \"random hash\" warning is emitted, a random hash is generated, and no outdated cached datasets are reused.\r\n\r\n## Actual results\r\nAfter the \"random hash\" warning is emitted, an identical hash is generated each time, and an outdated cached dataset is reused each run.\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.8.0-1038-gcp-x86_64-with-glibc2.31\r\n- Python version: 3.9.6\r\n- PyArrow version: 4.0.1","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2774\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774","id":963932199,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA2NDY2MDc0","number":2774,"title":"Prevent .map from using multiprocessing when loading from 
cache","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T12:11:38Z","updated_at":"2021-08-11T09:12:01Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2774","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2774.patch"},"body":"## Context\r\n\r\nOn our setup, we use different setup to train vs proprocessing datasets. Usually we are able to obtain a high number of cpus to preprocess, which allows us to use `num_proc` however we can't use as many during training phase. Currently if we use `num_proc={whatever the preprocessing value was}` we load from cache, but we get:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/pool.py\", line 131, in worker\r\n put((job, i, result))\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/queues.py\", line 371, in put\r\n self._writer.send_bytes(obj)\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 203, in send_bytes\r\n self._send_bytes(m[offset:offset + size])\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 414, in _send_bytes\r\n self._send(header + buf)\r\n File \"lib\/python3.8\/site-packages\/multiprocess\/connection.py\", line 371, in _send\r\n n = write(self._handle, buf)\r\nBrokenPipeError: [Errno 32] Broken pipe\r\n```\r\n\r\nOur current guess, is that we're spawning too many workers compared to the umber of cpus available, and it's running OOM. (Currently we're testing if this fixes the issue) (**UPDATE**: sequentiality has fixed our issue)\r\n\r\nTherefore something that might be interesting, would be the ability to load a multiprocessed dataset sequentially? Or at least agnostic of the num_proc (as they are only used to dertermine the number of shards), and allow users to choose how to load the shards?\r\n\r\n## Current issues\r\n\r\n~I'm having a hard time making fingerprints match. 
For some reason, the multiprocessing and the sequential version generate two different hash.~\r\n\r\n**EDIT**: Turns out multiprocessing and sequential have different `transform` value for fingerprinting (check `fingerprint_transform`) when running `_map_single`:\r\n - sequential : `datasets.arrow_dataset.Dataset._map_single`\r\n - multiprocessing: `datasets.arrow_dataset._map_single`\r\n \r\n This discrepancy is caused by multiprocessing pickling the transformer function, it doesn't see to keep the `Dataset` hierarchy. I'm still unclear on why `func.__qual_name__` isn't handled correctly in multiprocessing. But replacing `__qualname__` by `__name__` fixes the issue.\r\n\r\n## What was done\r\n\r\n~We try to prevent the usage of multiprocessing when loading a dataset. Instead we load all cached shards sequentially.~\r\n\r\nI couldn't find a nice way to obtain the cached_file_name and check they all exist before deciding to use the multiprocessing flow or not. Instead I expose an optional boolean `sequential` in `map` method.\r\n\r\n## TODO\r\n - [x] Check that the multiprocessed version and the sequential version output the same output\r\n - [x] Check that sequential can load multiprocessed\r\n - [x] Check that multiprocessed can load sequential\r\n \r\n ## Test\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom multiprocessing import Pool\r\nimport random\r\n\r\ndef process(batch, rng):\r\n length = len(batch[\"text\"])\r\n return {**batch, \"processed_text\": [f\"PROCESSED {rng.random()}\" for _ in range(length)]}\r\n\r\ndataset = load_dataset(\"stas\/openwebtext-10k\", split=\"train\")\r\nprint(dataset.column_names)\r\nprint(type(dataset))\r\n\r\nrng = random.Random(42)\r\ndataset1 = dataset.map(process, batched=True, batch_size=50, num_proc=4, fn_kwargs={\"rng\": rng})\r\n\r\n# This one should be loaded from cache\r\nrng = random.Random(42)\r\ndataset2 = dataset.map(process, batched=True, batch_size=50, num_proc=4, fn_kwargs={\"rng\": rng}, sequential=True)\r\n\r\n# Just to check that the random generator was correct\r\nprint(dataset1[-1][\"processed_text\"])\r\nprint(dataset2[-1][\"processed_text\"])\r\n```\r\n \r\n ## Other solutions\r\n\r\nI chose to load everything sequentially, but we can probably find a way to load shards in parallel using another number of workers (essentially this would be an argument not used for fingerprinting, allowing to allow `m` shards using `n` processes, which would be very useful when same dataset have to be loaded on two different setup, and we still want to leverage cache).\r\n\r\ncc @lhoestq (since I had asked you previously on `num_proc` being used for fingerprinting). 
Don't know if this is acceptable.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2773\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2773","id":963730497,"node_id":"MDU6SXNzdWU5NjM3MzA0OTc=","number":2773,"title":"Remove dataset_infos.json","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-09T07:43:19Z","updated_at":"2021-08-09T07:43:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nAs discussed, there are infos in the `dataset_infos.json` which are redundant and we could have them only in the README file.\r\n\r\nOthers could be migrated to the README, like: \"dataset_size\", \"size_in_bytes\", \"download_size\", \"splits.split_name.[num_bytes, num_examples]\",...\r\n\r\nHowever, there are others that do not seem too meaningful in the README, like the checksums.\r\n\r\n**Describe the solution you'd like**\r\nOpen a discussion to decide what to do with the `dataset_infos.json` files: which information to be migrated and\/or which information to be kept.\r\n\r\ncc: @julien-c @lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2772\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2772","id":963348834,"node_id":"MDU6SXNzdWU5NjMzNDg4MzQ=","number":2772,"title":"Remove returned feature constrain","user":{"login":"PosoSAgapo","id":33200481,"node_id":"MDQ6VXNlcjMzMjAwNDgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33200481?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PosoSAgapo","html_url":"https:\/\/github.com\/PosoSAgapo","followers_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/followers","following_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/repos","events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-08T04:01:30Z","updated_at":"2021-08-08T08:48:01Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"In the current version, the returned value of the map function has to be list or ndarray. However, this makes it unsuitable for many tasks. In NLP, many features are sparse like verb words, noun chunks, if we want to assign different values to different words, which will result in a large sparse matrix if we only score useful words like verb words. \r\n\r\nMostly, when using it on large scale, saving it as a whole takes a lot of disk storage and making it hard to read, the normal method is saving it in sparse form. However, the NumPy does not support sparse, therefore I have to use PyTorch or scipy to transform a matrix into special sparse form, which is not a form that can be transformed into list or ndarry. 
This violates the feature constraints of the map function. \r\n\r\nI do appreciate the convenience of Datasets package, but I do not think the compulsory datatype constrain is necessary, in some cases, we just cannot transform it into a list or ndarray due to some reasons. Any way to fix this? Or what I can do to disable the compulsory datatype constrain?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2771\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771","id":963257036,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1OTExMDMw","number":2771,"title":"[WIP][Common Voice 7] Add common voice 7.0","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T16:01:10Z","updated_at":"2021-08-07T16:19:38Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2771","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2771.patch"},"body":"This PR allows to load the new common voice dataset manually as explained when doing: \r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\".\/datasets\/datasets\/common_voice_7\", \"ab\")\r\n```\r\n\r\n=>\r\n\r\n```\r\n Please follow the manual download instructions:\r\n\r\n You need to manually the dataset from `https:\/\/commonvoice.mozilla.org\/en\/datasets`.\r\n Make sure you choose the version `Common Voice Corpus 7.0`.\r\n Choose a language of your choice and find the corresponding language-id, *e.g.*, `Abkhaz` with language-id `ab`. 
The following language-ids are available:\r\n\r\n ['ab', 'ar', 'as', 'az', 'ba', 'bas', 'be', 'bg', 'br', 'ca', 'cnh', 'cs', 'cv', 'cy', 'de', 'dv', 'el', 'en', 'eo', 'es', 'et', 'eu', 'fa', 'fi', 'fr', 'fy-NL', 'ga-IE', 'gl', 'gn', 'ha', 'hi', 'hsb', 'hu', 'hy-AM', 'ia', 'id', 'it', 'ja', 'ka', 'kab', 'kk', 'kmr', 'ky', 'lg', 'lt', 'lv', 'mn', 'mt', 'nl', 'or', 'pa-IN', 'pl', 'pt', 'rm-sursilv', 'rm-vallader', 'ro', 'ru', 'rw', 'sah', 'sk', 'sl', 'sr', 'sv-SE', 'ta', 'th', 'tr', 'tt', 'ug', 'uk', 'ur', 'uz', 'vi', 'vot', 'zh-CN', 'zh-HK', 'zh-TW']\r\n\r\n Next, you will have to enter your email address to download the dataset in the `tar.gz` format. Save the file under .\r\n The file should then be extracted with: ``tar -xvzf `` which will extract a folder called ``cv-corpus-7.0-2021-07-21``.\r\n The dataset can then be loaded with `datasets.load_dataset(\"common_voice\", , data_dir=\"\", ignore_verifications=True).\r\n```\r\n\r\nHaving followed those instructions one can then download the data as follows: \r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\".\/datasets\/datasets\/common_voice_7\", \"ab\", data_dir=\".\/cv-corpus-7.0-2021-07-21\/\", ignore_verifications=True)\r\n```\r\n\r\n## TODO\r\n- [ ] Discuss naming. Is the name ok here \"common_voice_7\"? The dataset script differs only really in one point from `common_voice.py` in that all the metadata is different (more hours etc...) and that it has to use manual data dir for now\r\n- [ ] Ideally we should get a bundled download link. For `common_voice.py` there is a bundled download link: `https:\/\/voice-prod-bundler-ee1969a6ce8178826482b88e843c335139bd3fb4.s3.amazonaws.com\/cv-corpus-6.1-2020-12-11\/{}.tar.gz` that allows one to directly download the data. However such a link is missing for Common Voice 7. I guess we should try to contact common voice about it and ask whether we could host the data or help otherwise somehow. See: https:\/\/github.com\/common-voice\/common-voice-bundler\/issues\/15 cc @yjernite \r\n- [ ] I did not compute the dataset.json and it would mean that I'd have to download 76 datasets totalling around 1TB manually before running the checksum command. This just takes too much time. For now the user will have to add a `ignore_verifications=True` to download the data. 
This step would also be much easier if we could get a bundled link\r\n- [ ] Add dummy data","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2770\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770","id":963246512,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1OTAzMzIy","number":2770,"title":"Add support for fast tokenizer in BertScore","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T15:00:03Z","updated_at":"2021-08-09T12:34:43Z","closed_at":"2021-08-09T11:16:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2770","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2770.patch"},"body":"This PR adds support for a fast tokenizer in BertScore, which has been added recently to the lib.\r\nFixes #2765 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2769\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769","id":963240802,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1ODk5MTYy","number":2769,"title":"Allow PyArrow from 
source","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-07T14:26:44Z","updated_at":"2021-08-09T15:38:39Z","closed_at":"2021-08-09T15:38:39Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2769","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2769.patch"},"body":"When installing pyarrow from source the version is:\r\n\r\n```python\r\n>>> import pyarrow; pyarrow.__version__\r\n'2.1.0.dev612'\r\n```\r\n\r\n-> however this breaks the install check at init of `datasets`. 
This PR makes sure that everything coming after the last `'.'` is removed.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2768\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2768","id":963229173,"node_id":"MDU6SXNzdWU5NjMyMjkxNzM=","number":2768,"title":"`ArrowInvalid: Added column's length must match table's length.` after using `select`","user":{"login":"lvwerra","id":8264887,"node_id":"MDQ6VXNlcjgyNjQ4ODc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8264887?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lvwerra","html_url":"https:\/\/github.com\/lvwerra","followers_url":"https:\/\/api.github.com\/users\/lvwerra\/followers","following_url":"https:\/\/api.github.com\/users\/lvwerra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lvwerra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lvwerra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lvwerra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lvwerra\/orgs","repos_url":"https:\/\/api.github.com\/users\/lvwerra\/repos","events_url":"https:\/\/api.github.com\/users\/lvwerra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lvwerra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-07T13:17:29Z","updated_at":"2021-08-09T11:26:43Z","closed_at":"2021-08-09T11:26:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI would like to add a column to a downsampled dataset. However I get an error message saying the length don't match with the length of the unsampled dataset indicated. I suspect that the dataset size is not updated when calling `select`.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset(\"tweets_hate_speech_detection\")['train'].select(range(128))\r\nds = ds.add_column('ones', [1]*128)\r\n```\r\n\r\n## Expected results\r\nI would expect a new column named `ones` filled with `1`. When I check the length of `ds` it says `128`. 
Interestingly, it works when calling `ds = ds.map(lambda x: x)` before adding the column.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n```python\r\n---------------------------------------------------------------------------\r\nArrowInvalid Traceback (most recent call last)\r\n\/var\/folders\/l4\/2905jygx4tx5jv8_kn03vxsw0000gn\/T\/ipykernel_6301\/868709636.py in \r\n 1 from datasets import load_dataset\r\n 2 ds = load_dataset(\"tweets_hate_speech_detection\")['train'].select(range(128))\r\n----> 3 ds = ds.add_column('ones', [0]*128)\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 183 }\r\n 184 # apply actual function\r\n--> 185 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 186 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 187 # re-apply format to the output\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 395 # Call actual function\r\n 396 \r\n--> 397 out = func(self, *args, **kwargs)\r\n 398 \r\n 399 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in add_column(self, name, column, new_fingerprint)\r\n 2965 column_table = InMemoryTable.from_pydict({name: column})\r\n 2966 # Concatenate tables horizontally\r\n-> 2967 table = ConcatenationTable.from_tables([self._data, column_table], axis=1)\r\n 2968 # Update features\r\n 2969 info = self.info.copy()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in from_tables(cls, tables, axis)\r\n 715 table_blocks = to_blocks(table)\r\n 716 blocks = _extend_blocks(blocks, table_blocks, axis=axis)\r\n--> 717 return cls.from_blocks(blocks)\r\n 718 \r\n 719 @property\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in from_blocks(cls, blocks)\r\n 663 return cls(table, blocks)\r\n 664 else:\r\n--> 665 table = cls._concat_blocks_horizontally_and_vertically(blocks)\r\n 666 return cls(table, blocks)\r\n 667 \r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in _concat_blocks_horizontally_and_vertically(cls, blocks)\r\n 623 if not tables:\r\n 624 continue\r\n--> 625 pa_table_horizontally_concatenated = cls._concat_blocks(tables, axis=1)\r\n 626 pa_tables_to_concat_vertically.append(pa_table_horizontally_concatenated)\r\n 627 return cls._concat_blocks(pa_tables_to_concat_vertically, axis=0)\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/datasets\/table.py in _concat_blocks(blocks, axis)\r\n 612 else:\r\n 613 for name, col in zip(table.column_names, table.columns):\r\n--> 614 pa_table = pa_table.append_column(name, col)\r\n 615 return pa_table\r\n 616 else:\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.append_column()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.Table.add_column()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/git\/semantic-clustering\/env\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowInvalid: Added column's length must match table's length. 
Expected length 31962 but got length 128\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 5.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2767\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2767","id":963002120,"node_id":"MDU6SXNzdWU5NjMwMDIxMjA=","number":2767,"title":"equal operation to perform unbatch for huggingface datasets ","user":{"login":"dorooddorood606","id":79288051,"node_id":"MDQ6VXNlcjc5Mjg4MDUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79288051?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorooddorood606","html_url":"https:\/\/github.com\/dorooddorood606","followers_url":"https:\/\/api.github.com\/users\/dorooddorood606\/followers","following_url":"https:\/\/api.github.com\/users\/dorooddorood606\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorooddorood606\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorooddorood606\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorooddorood606\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorooddorood606\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorooddorood606\/repos","events_url":"https:\/\/api.github.com\/users\/dorooddorood606\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorooddorood606\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-08-06T19:45:52Z","updated_at":"2021-08-07T19:56:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI need to use \"unbatch\" operation in tensorflow on a huggingface dataset, I could not find this operation, could you kindly direct me how I can do it, here is the problem I am trying to solve:\r\n\r\nI am considering \"record\" dataset in SuperGlue and I need to replicate each entery of the dataset for each answer, to make it similar to what T5 originally did:\r\n\r\nhttps:\/\/github.com\/google-research\/text-to-text-transfer-transformer\/blob\/3c58859b8fe72c2dbca6a43bc775aa510ba7e706\/t5\/data\/preprocessors.py#L925\r\n\r\nHere please find an example:\r\n\r\n For example, a typical example from ReCoRD might look like\r\n {\r\n 'passsage': 'This is the passage.',\r\n 'query': 'A @placeholder is a bird.',\r\n 'entities': ['penguin', 'potato', 'pigeon'],\r\n 'answers': ['penguin', 'pigeon'],\r\n }\r\n and I need a prosessor which would turn this example into the following two examples:\r\n {\r\n 'inputs': 'record query: A @placeholder is a bird. 
entities: penguin, '\r\n 'potato, pigeon passage: This is the passage.',\r\n 'targets': 'penguin',\r\n }\r\n and\r\n {\r\n 'inputs': 'record query: A @placeholder is a bird. entities: penguin, '\r\n 'potato, pigeon passage: This is the passage.',\r\n 'targets': 'pigeon',\r\n }\r\n\r\n\r\nFor doing this, one need unbatch, as each entry can map to multiple samples depending on the number of answers, I am not sure how to perform this operation with huggingface datasets library and greatly appreciate your help\r\n\r\n@lhoestq \r\n\r\nThank you very much.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2766\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766","id":962994198,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1NzAyNjM5","number":2766,"title":"fix typo (ShuffingConfig -> ShufflingConfig)","user":{"login":"daleevans","id":4944007,"node_id":"MDQ6VXNlcjQ5NDQwMDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944007?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/daleevans","html_url":"https:\/\/github.com\/daleevans","followers_url":"https:\/\/api.github.com\/users\/daleevans\/followers","following_url":"https:\/\/api.github.com\/users\/daleevans\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/daleevans\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/daleevans\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/daleevans\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/daleevans\/orgs","repos_url":"https:\/\/api.github.com\/users\/daleevans\/repos","events_url":"https:\/\/api.github.com\/users\/daleevans\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/daleevans\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-06T19:31:40Z","updated_at":"2021-08-10T14:17:03Z","closed_at":"2021-08-10T14:17:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2766","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2766.patch"},"body":"pretty straightforward, it should be Shuffling instead of Shuffing","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2765\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2765","id":962861395,"node_id":"MDU6SXNzdWU5NjI4NjEzOTU=","number":2765,"title":"BERTScore 
Error","user":{"login":"gagan3012","id":49101362,"node_id":"MDQ6VXNlcjQ5MTAxMzYy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/49101362?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gagan3012","html_url":"https:\/\/github.com\/gagan3012","followers_url":"https:\/\/api.github.com\/users\/gagan3012\/followers","following_url":"https:\/\/api.github.com\/users\/gagan3012\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gagan3012\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gagan3012\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gagan3012\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gagan3012\/orgs","repos_url":"https:\/\/api.github.com\/users\/gagan3012\/repos","events_url":"https:\/\/api.github.com\/users\/gagan3012\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gagan3012\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-06T15:58:57Z","updated_at":"2021-08-09T11:16:25Z","closed_at":"2021-08-09T11:16:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\npredictions = [\"hello there\", \"general kenobi\"]\r\nreferences = [\"hello there\", \"general kenobi\"]\r\nbert = load_metric('bertscore')\r\nbert.compute(predictions=predictions, references=references,lang='en')\r\n```\r\n\r\n# Bug\r\n`TypeError: get_hash() missing 1 required positional argument: 'use_fast_tokenizer'`\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version:\r\n- Platform: Colab \r\n- Python version:\r\n- PyArrow version:\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2764\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764","id":962554799,"node_id":"MDExOlB1bGxSZXF1ZXN0NzA1MzI3MDQ5","number":2764,"title":"Add DER metric for SUPERB speaker diarization 
task","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-06T09:12:36Z","updated_at":"2021-08-06T10:06:53Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2764","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2764.patch"},"body":null,"performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2763\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2763","id":961895523,"node_id":"MDU6SXNzdWU5NjE4OTU1MjM=","number":2763,"title":"English wikipedia datasets is not clean","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-05T14:37:24Z","updated_at":"2021-08-05T14:37:41Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWikipedia english dumps contain many wikipedia paragraphs like \"References\", \"Category:\" and \"See Also\" that should not be used for training.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import load_dataset\r\nw = load_dataset('wikipedia', '20200501.en')\r\nprint(w['train'][0]['text'])\r\n```\r\n\r\n> 'Yangliuqing () is a market town in Xiqing District, in the western suburbs of Tianjin, People\\'s Republic of China. Despite its relatively small size, it has been named since 2006 in the \"famous historical and cultural market towns in China\".\\n\\nIt is best known in China for creating nianhua or Yangliuqing nianhua. For more than 400 years, Yangliuqing has in effect specialised in the creation of these woodcuts for the New Year. wood block prints using vivid colourschemes to portray traditional scenes of children\\'s games often interwoven with auspiciouse objects.\\n\\n, it had 27 residential communities () and 25 villages under its administration.\\n\\nShi Family Grand Courtyard\\n\\nShi Family Grand Courtyard (Ti\u0101nj\u012bn Sh\u00ed Ji\u0101 D\u00e0 Yu\u00e0n, \u5929\u6d25\u77f3\u5bb6\u5927\u9662) is situated in Yangliuqing Town of Xiqing District, which is the former residence of wealthy merchant Shi Yuanshi - the 4th son of Shi Wancheng, one of the eight great masters in Tianjin. First built in 1875, it covers over 6,000 square meters, including large and small yards and over 200 folk houses, a theater and over 275 rooms that served as apartments and places of business and worship for this powerful family. Shifu Garden, which finished its expansion in October 2003, covers 1,200 square meters, incorporates the elegance of imperial garden and delicacy of south garden. Now the courtyard of Shi family covers about 10,000 square meters, which is called the first mansion in North China. Now it serves as the folk custom museum in Yangliuqing, which has a large collection of folk custom museum in Yanliuqing, which has a large collection of folk art pieces like Yanliuqing New Year pictures, brick sculpture.\\n\\nShi\\'s ancestor came from Dong\\'e County in Shandong Province, engaged in water transport of grain. As the wealth gradually accumulated, the Shi Family moved to Yangliuqing and bought large tracts of land and set up their residence. Shi Yuanshi came from the fourth generation of the family, who was a successful businessman and a good household manager, and the residence was thus enlarged for several times until it acquired the present scale. It is believed to be the first mansion in the west of Tianjin.\\n\\nThe residence is symmetric based on the axis formed by a passageway in the middle, on which there are four archways. On the east side of the courtyard, there are traditional single-story houses with rows of rooms around the four sides, which was once the living area for the Shi Family. The rooms on north side were the accountants\\' office. On the west are the major constructions including the family hall for worshipping Buddha, theater and the south reception room. 
On both sides of the residence are side yard rooms for maids and servants.\\n\\nToday, the Shi mansion, located in the township of Yangliuqing to the west of central Tianjin, stands as a surprisingly well-preserved monument to China\\'s pre-revolution mercantile spirit. It also serves as an on-location shoot for many of China\\'s popular historical dramas. Many of the rooms feature period furniture, paintings and calligraphy, and the extensive Shifu Garden.\\n\\nPart of the complex has been turned into the Yangliuqing Museum, which includes displays focused on symbolic aspects of the courtyards\\' construction, local folk art and customs, and traditional period furnishings and crafts.\\n\\n**See also \\n\\nList of township-level divisions of Tianjin\\n\\nReferences \\n\\n http:\/\/arts.cultural-china.com\/en\/65Arts4795.html\\n\\nCategory:Towns in Tianjin'**\r\n\r\n## Expected results\r\nI expect no junk in the data.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2762\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2762","id":961652046,"node_id":"MDU6SXNzdWU5NjE2NTIwNDY=","number":2762,"title":"Add RVL-CDIP dataset","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-05T09:57:05Z","updated_at":"2021-08-05T09:57:22Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** RVL-CDIP\r\n- **Description:** The RVL-CDIP (Ryerson Vision Lab Complex Document Information Processing) dataset consists of 400,000 grayscale images in 16 classes, with 25,000 images per 
class. There are 320,000 training images, 40,000 validation images, and 40,000 test images. The images are sized so their largest dimension does not exceed 1000 pixels.\r\n- **Paper:** https:\/\/www.cs.cmu.edu\/~aharley\/icdar15\/\r\n- **Data:** https:\/\/www.cs.cmu.edu\/~aharley\/rvl-cdip\/\r\n- **Motivation:** I'm currently adding LayoutLMv2 and LayoutXLM to HuggingFace Transformers. LayoutLM (v1) already exists in the library. This dataset has a large value for document image classification (i.e. classifying scanned documents). LayoutLM models obtain SOTA on this dataset, so would be great to directly use it in notebooks.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2761\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2761","id":961568287,"node_id":"MDU6SXNzdWU5NjE1NjgyODc=","number":2761,"title":"Error loading C4 realnewslike dataset","user":{"login":"danshirron","id":32061512,"node_id":"MDQ6VXNlcjMyMDYxNTEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32061512?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danshirron","html_url":"https:\/\/github.com\/danshirron","followers_url":"https:\/\/api.github.com\/users\/danshirron\/followers","following_url":"https:\/\/api.github.com\/users\/danshirron\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danshirron\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danshirron\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danshirron\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danshirron\/orgs","repos_url":"https:\/\/api.github.com\/users\/danshirron\/repos","events_url":"https:\/\/api.github.com\/users\/danshirron\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danshirron\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-08-05T08:16:58Z","updated_at":"2021-08-08T19:44:34Z","closed_at":"2021-08-08T19:44:34Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nError loading C4 realnewslike dataset. 
Validation part mismatch\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n raw_datasets = load_dataset('c4', 'realnewslike', cache_dir=model_args.cache_dir)\r\n## Expected results\r\nsuccess on data loading\r\n## Actual results\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 15.3M\/15.3M [00:00<00:00, 28.1MB\/s]Traceback (most recent call last): \r\n File \"run_mlm_tf.py\", line 794, in \r\n main() \r\n File \"run_mlm_tf.py\", line 425, in main \r\n raw_datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name, cache_dir=model_args.cache_dir) File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 843, in load_dataset \r\n builder_instance.download_and_prepare( \r\n File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 608, in download_and_prepare \r\n self._download_and_prepare( \r\n File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 698, in _download_and_prepare verify_splits(self.info.splits, split_dict) File \"\/home\/dshirron\/.local\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 74, in verify_splits \r\n raise NonMatchingSplitsSizesError(str(bad_splits)) \r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='validation', num_bytes=38165657946, num_examples=13799838, dataset_name='c4'), 'recorded': SplitInfo(name='validation', num_bytes=37875873, num_examples=13863, dataset_name='c4')}] \r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: Linux-5.4.0-58-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2760\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2760","id":961372667,"node_id":"MDU6SXNzdWU5NjEzNzI2Njc=","number":2760,"title":"Add Nuswide dataset","user":{"login":"shivangibithel","id":19774925,"node_id":"MDQ6VXNlcjE5Nzc0OTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19774925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shivangibithel","html_url":"https:\/\/github.com\/shivangibithel","followers_url":"https:\/\/api.github.com\/users\/shivangibithel\/followers","following_url":"https:\/\/api.github.com\/users\/shivangibithel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shivangibithel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shivangibithel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shivangibithel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shivangibithel\/orgs","repos_url":"https:\/\/api.github.com\/users\/shivangibithel\/repos","events_url":"https:\/\/api.github.com\/users\/shivangibithel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shivangibithel\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-05T03:00:41Z","updated_at":"2021-08-05T03:00:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *NUSWIDE*\r\n- **Description:** *[A Real-World Web Image Dataset from National University of Singapore](https:\/\/lms.comp.nus.edu.sg\/wp-content\/uploads\/2019\/research\/nuswide\/NUS-WIDE.html)*\r\n- **Paper:** *[here](https:\/\/lms.comp.nus.edu.sg\/wp-content\/uploads\/2019\/research\/nuswide\/nuswide-civr2009.pdf)*\r\n- **Data:** *[here](https:\/\/github.com\/wenting-zhao\/nuswide)*\r\n- **Motivation:** *This dataset is a benchmark in the Text Retrieval task.*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2759\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2759","id":960636572,"node_id":"MDU6SXNzdWU5NjA2MzY1NzI=","number":2759,"title":"the meteor metric seems not consist with the official 
version","user":{"login":"jianguda","id":9079360,"node_id":"MDQ6VXNlcjkwNzkzNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9079360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jianguda","html_url":"https:\/\/github.com\/jianguda","followers_url":"https:\/\/api.github.com\/users\/jianguda\/followers","following_url":"https:\/\/api.github.com\/users\/jianguda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jianguda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jianguda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jianguda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jianguda\/orgs","repos_url":"https:\/\/api.github.com\/users\/jianguda\/repos","events_url":"https:\/\/api.github.com\/users\/jianguda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jianguda\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-04T15:33:17Z","updated_at":"2021-08-04T17:18:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe computed meteor score seems strange because the value is very different from the scores computed by other tools. For example, I use the meteor score computed by [NLGeval](https:\/\/github.com\/Maluuba\/nlg-eval) as the reference (which reuses the official jar file for the computation)\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_metric\r\nfrom nlgeval import NLGEval, compute_individual_metrics\r\n\r\nmeteor = load_metric('meteor')\r\npredictions = [\"It is a guide to action which ensures that the military always obeys the commands of the party\"]\r\nreferences = [\"It is a guide to action that ensures that the military will forever heed Party commands\"]\r\nresults = meteor.compute(predictions=predictions, references=references)\r\n# print the actual result\r\nprint(round(results[\"meteor\"], 4))\r\nmetrics_dict = compute_individual_metrics(references, predictions[0])\r\n# print the expected result\r\nprint(round(metrics_dict[\"METEOR\"], 4))\r\n```\r\nBy the way, you need to install the `nlg-eval` library first. 
Please check the installation guide [here](https:\/\/github.com\/Maluuba\/nlg-eval#setup), thanks!\r\n\r\n## Expected results\r\n`0.4474`\r\n\r\n## Actual results\r\n`0.7398`\r\n\r\n## Environment info\r\n- `datasets` version: 1.10.2\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2758\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758","id":960206575,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAzMjQ5Nzky","number":2758,"title":"Raise ManualDownloadError when loading a dataset that requires previous manual download","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-04T10:19:55Z","updated_at":"2021-08-04T11:36:30Z","closed_at":"2021-08-04T11:36:30Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2758","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2758.patch"},"body":"This PR implements the raising of a `ManualDownloadError` when loading a dataset that requires previous manual download, and this is missing.\r\n\r\nThe `ManualDownloadError` is raised whether the dataset is loaded in normal or streaming mode.\r\n\r\nClose #2749.\r\n\r\ncc: @severo ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2757\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2757","id":959984081,"node_id":"MDU6SXNzdWU5NTk5ODQwODE=","number":2757,"title":"Unexpected type after `concatenate_datasets`","user":{"login":"JulesBelveze","id":32683010,"node_id":"MDQ6VXNlcjMyNjgzMDEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32683010?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JulesBelveze","html_url":"https:\/\/github.com\/JulesBelveze","followers_url":"https:\/\/api.github.com\/users\/JulesBelveze\/followers","following_url":"https:\/\/api.github.com\/users\/JulesBelveze\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JulesBelveze\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JulesBelveze\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JulesBelveze\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JulesBelveze\/orgs","repos_url":"https:\/\/api.github.com\/users\/JulesBelveze\/repos","events_url":"https:\/\/api.github.com\/users\/JulesBelveze\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JulesBelveze\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-04T07:10:39Z","updated_at":"2021-08-04T16:01:24Z","closed_at":"2021-08-04T16:01:23Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to concatenate two `Dataset` using `concatenate_datasets` but it turns out that after concatenation the features are casted from `torch.Tensor` to `list`. \r\nIt then leads to a weird tensors when trying to convert it to a `DataLoader`. However, if I use each `Dataset` separately everything behave as expected.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> featurized_teacher\r\nDataset({\r\n features: ['t_labels', 't_input_ids', 't_token_type_ids', 't_attention_mask'],\r\n num_rows: 502\r\n})\r\n>>> for f in featurized_teacher.features:\r\n print(featurized_teacher[f].shape)\r\ntorch.Size([502])\r\ntorch.Size([502, 300])\r\ntorch.Size([502, 300])\r\ntorch.Size([502, 300])\r\n\r\n>>> featurized_student\r\nDataset({\r\n features: ['s_features', 's_labels'],\r\n num_rows: 502\r\n})\r\n>>> for f in featurized_student.features:\r\n print(featurized_student[f].shape)\r\ntorch.Size([502, 64])\r\ntorch.Size([502])\r\n```\r\nThe shapes seem alright to me. 
Then the results after concatenation are as follow:\r\n```python\r\n>>> concat_dataset = datasets.concatenate_datasets([featurized_student, featurized_teacher], axis=1)\r\n>>> type(concat_dataset[\"t_labels\"])\r\n\r\n```\r\nOne would expect to obtain the same type as the one before concatenation.\r\n\r\nAm I doing something wrong here? Any idea on how to fix this unexpected behavior?\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.0\r\n- Platform: macOS-10.14.6-x86_64-i386-64bit\r\n- Python version: 3.9.5\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2756\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756","id":959255646,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMzk4Mjk1","number":2756,"title":"Fix metadata JSON for ubuntu_dialogs_corpus dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T15:48:59Z","updated_at":"2021-08-04T09:43:25Z","closed_at":"2021-08-04T09:43:25Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2756","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2756.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2755\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755","id":959115888,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjgwMjI4","number":2755,"title":"Fix metadata JSON for 
turkish_movie_sentiment dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T13:25:44Z","updated_at":"2021-08-04T09:06:54Z","closed_at":"2021-08-04T09:06:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2755","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2755.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2754\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754","id":959105577,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjcxMjM4","number":2754,"title":"Generate metadata JSON for telugu_books 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T13:14:52Z","updated_at":"2021-08-04T08:49:02Z","closed_at":"2021-08-04T08:49:02Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2754","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2754.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2753\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753","id":959036995,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjEyMjMz","number":2753,"title":"Generate metadata JSON for reclor 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:52:29Z","updated_at":"2021-08-04T08:07:15Z","closed_at":"2021-08-04T08:07:15Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2753","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2753.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2752\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752","id":959023608,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMjAxMjAy","number":2752,"title":"Generate metadata JSON for lm1b 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:34:56Z","updated_at":"2021-08-04T06:40:40Z","closed_at":"2021-08-04T06:40:39Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2752","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2752.patch"},"body":"Related to #2743.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2751\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751","id":959021262,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMTk5MjA5","number":2751,"title":"Update metadata for wikihow 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T11:31:57Z","updated_at":"2021-08-03T15:52:09Z","closed_at":"2021-08-03T15:52:09Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2751","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2751.patch"},"body":"Update metadata for wikihow dataset:\r\n- Remove leading new line character in description and citation\r\n- Update metadata JSON\r\n- Remove no longer necessary `urls_checksums\/checksums.txt` file\r\n\r\nRelated to #2748.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2750\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2750","id":958984730,"node_id":"MDU6SXNzdWU5NTg5ODQ3MzA=","number":2750,"title":"Second concatenation of datasets produces 
errors","user":{"login":"Aktsvigun","id":36672861,"node_id":"MDQ6VXNlcjM2NjcyODYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36672861?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Aktsvigun","html_url":"https:\/\/github.com\/Aktsvigun","followers_url":"https:\/\/api.github.com\/users\/Aktsvigun\/followers","following_url":"https:\/\/api.github.com\/users\/Aktsvigun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Aktsvigun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Aktsvigun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Aktsvigun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Aktsvigun\/orgs","repos_url":"https:\/\/api.github.com\/users\/Aktsvigun\/repos","events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-08-03T10:47:04Z","updated_at":"2021-08-10T11:42:07Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"p
ull_request":null,"body":"Hi,\r\n\r\nI am need to concatenate my dataset with others several times, and after I concatenate it for the second time, the features of features (e.g. tags names) are collapsed. This hinders, for instance, the usage of tokenize function with `data.map`.\r\n\r\n```\r\nfrom datasets import load_dataset, concatenate_datasets\r\n\r\ndata = load_dataset('trec')['train']\r\nconcatenated = concatenate_datasets([data, data])\r\nconcatenated_2 = concatenate_datasets([concatenated, concatenated])\r\nprint('True features of features:', concatenated.features)\r\nprint('\\nProduced features of features:', concatenated_2.features)\r\n```\r\noutputs \r\n\r\n```\r\nTrue features of features: {'label-coarse': ClassLabel(num_classes=6, names=['DESC', 'ENTY', 'ABBR', 'HUM', 'NUM', 'LOC'], names_file=None, id=None), 'label-fine': ClassLabel(num_classes=47, names=['manner', 'cremat', 'animal', 'exp', 'ind', 'gr', 'title', 'def', 'date', 'reason', 'event', 'state', 'desc', 'count', 'other', 'letter', 'religion', 'food', 'country', 'color', 'termeq', 'city', 'body', 'dismed', 'mount', 'money', 'product', 'period', 'substance', 'sport', 'plant', 'techmeth', 'volsize', 'instru', 'abb', 'speed', 'word', 'lang', 'perc', 'code', 'dist', 'temp', 'symbol', 'ord', 'veh', 'weight', 'currency'], names_file=None, id=None), 'text': Value(dtype='string', id=None)}\r\n\r\nProduced features of features: {'label-coarse': Value(dtype='int64', id=None), 'label-fine': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None)}\r\n```\r\n\r\nI am using `datasets` v.1.11.0","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2749\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2749","id":958968748,"node_id":"MDU6SXNzdWU5NTg5Njg3NDg=","number":2749,"title":"Raise a proper exception when trying to stream a dataset that requires to manually download files","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-03T10:26:27Z","updated_at":"2021-08-09T08:53:35Z","closed_at":"2021-08-04T11:36:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAt least for 'reclor', 'telugu_books', 'turkish_movie_sentiment', 'ubuntu_dialogs_corpus', 'wikihow', trying to `load_dataset` in streaming mode raises a `TypeError` without any detail about why it fails.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"reclor\", streaming=True)\r\n```\r\n\r\n## Expected results\r\n\r\nIdeally: raise a specific exception, something like `ManualDownloadError`.\r\n\r\nOr at least give the reason in the message, as when we load in normal mode:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"reclor\")\r\n```\r\n\r\n```\r\nAssertionError: The dataset reclor with config default requires manual data.\r\n Please follow the manual download instructions: to use ReClor you need to download it manually. 
Please go to its homepage (http:\/\/whyu.me\/reclor\/) fill the google\r\n form and you will receive a download link and a password to extract it.Please extract all files in one folder and use the path folder in datasets.load_dataset('reclor', data_dir='path\/to\/folder\/folder_name')\r\n .\r\n Manual data can be loaded with `datasets.load_dataset(reclor, data_dir='')\r\n```\r\n\r\n## Actual results\r\n\r\n```\r\nTypeError: expected str, bytes or os.PathLike object, not NoneType\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: macOS-11.5-x86_64-i386-64bit\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2748\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748","id":958889041,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMDg4NTk4","number":2748,"title":"Generate metadata JSON for wikihow dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-03T08:55:40Z","updated_at":"2021-08-03T10:17:51Z","closed_at":"2021-08-03T10:17:51Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2748","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2748.patch"},"body":"Related to #2743.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2747\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747","id":958867627,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAyMDcwOTgy","number":2747,"title":"add multi-proc in `to_json`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-08-03T08:30:13Z","updated_at":"2021-08-10T20:16:44Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2747","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2747.patch"},"body":"Closes #2663. I've tried adding multiprocessing in `to_json`. Here's some benchmarking I did to compare the timings of current version (say v1) and multi-proc version (say v2). I did this with `cpu_count` 4 (2015 Macbook Air)\r\n\r\n1. Dataset name: `ascent_kb` - 8.9M samples (all samples were used, reporting this for a single run)\r\nv1- ~225 seconds for converting whole dataset to json\r\nv2- ~200 seconds for converting whole dataset to json\r\n\r\n2. Dataset name: `lama` - 1.3M samples (all samples were used, reporting this for 2 runs)\r\nv1- ~26 seconds for converting whole dataset to json\r\nv2- ~23.6 seconds for converting whole dataset to json\r\n\r\nI think it's safe to say that v2 is 10% faster as compared to v1. Timings may improve further with better configuration.\r\n\r\nThe only bottleneck I feel is writing to file from the output list. If we can improve that aspect then timings may improve further. \r\n\r\nLet me know if any changes\/improvements can be done in this @stas00, @lhoestq, @albertvillanova. 
@lhoestq even suggested to extend this work with other export methods as well like `csv` or `parquet`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2746\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2746","id":958551619,"node_id":"MDU6SXNzdWU5NTg1NTE2MTk=","number":2746,"title":"Cannot load `few-nerd` dataset","user":{"login":"Mehrad0711","id":28717374,"node_id":"MDQ6VXNlcjI4NzE3Mzc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28717374?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehrad0711","html_url":"https:\/\/github.com\/Mehrad0711","followers_url":"https:\/\/api.github.com\/users\/Mehrad0711\/followers","following_url":"https:\/\/api.github.com\/users\/Mehrad0711\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehrad0711\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehrad0711\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehrad0711\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehrad0711\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehrad0711\/repos","events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-08-02T22:18:57Z","updated_at":"2021-08-03T19:45:44Z","closed_at":"2021-08-03T19:45:43Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nCannot load `few-nerd` dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset('few-nerd', 'supervised')\r\n```\r\n\r\n## Actual results\r\n\r\nExecuting above code will give the following error:\r\n\r\n```\r\nUsing the latest cached version of the module from \/Users\/Mehrad\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/few-nerd\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53 (last modified on Wed Jun 2 11:34:25 2021) since it couldn't be found locally at \/Users\/Mehrad\/Documents\/GitHub\/genienlp\/few-nerd\/few-nerd.py, or remotely (FileNotFoundError).\r\nDownloading and preparing dataset few_nerd\/supervised (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/Users\/Mehrad\/.cache\/huggingface\/datasets\/few_nerd\/supervised\/0.0.0\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53...\r\nTraceback (most recent call last):\r\n File \"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 693, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File 
\"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1107, in _prepare_split\r\n disable=bool(logging.get_verbosity() == logging.NOTSET),\r\n File \"\/Users\/Mehrad\/opt\/anaconda3\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"\/Users\/Mehrad\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/few-nerd\/62464ace912a40a0f33a11a8310f9041c9dc3590ff2b3c77c14d83ca53cfec53\/few-nerd.py\", line 196, in _generate_examples\r\n with open(filepath, encoding=\"utf-8\") as f:\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/Users\/Mehrad\/.cache\/huggingface\/datasets\/downloads\/supervised\/train.json'\r\n```\r\nThe bug is probably in identifying and downloading the dataset. If I download the json splits directly from [link](https:\/\/github.com\/nbroad1881\/few-nerd\/tree\/main\/uncompressed) and put them under the downloads directory, they will be processed into arrow format correctly. \r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Python version: 3.8\r\n- PyArrow version: 1.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2745\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745","id":958269579,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxNTc0Mjcz","number":2745,"title":"added semeval18_emotion_classification dataset","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-02T15:39:55Z","updated_at":"2021-08-06T13:02:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2745","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2745.patch"},"body":"I added the data set of SemEval 2018 Task 1 (Subtask 5) for emotion detection in three languages.\r\n\r\n```\r\ndatasets-cli test datasets\/semeval18_emotion_classification\/ --save_infos --all_configs\r\n\r\nRUN_SLOW=1 pytest 
tests\/test_dataset_common.py::LocalDatasetTest::test_load_real_dataset_semeval18_emotion_classification\r\n```\r\nBoth commands ran successfully.\r\n\r\nI couldn't create the dummy data (the files are tsvs but have .txt ending, maybe that's the problem?) and therefore the test on the dummy data fails, maybe someone can help here.\r\n\r\nI also formatted the code:\r\n```\r\nblack --line-length 119 --target-version py36 datasets\/semeval18_emotion_classification\/\r\nisort datasets\/semeval18_emotion_classification\/\r\nflake8 datasets\/semeval18_emotion_classification\/\r\n```\r\nThat's the publication for reference:\r\n\r\nMohammad, S., Bravo-Marquez, F., Salameh, M., & Kiritchenko, S. (2018). SemEval-2018 task 1: Affect in tweets. Proceedings of the 12th International Workshop on Semantic Evaluation, 1\u201317. https:\/\/doi.org\/10.18653\/v1\/S18-1001","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2744\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744","id":958146637,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxNDY4NDcz","number":2744,"title":"Fix key by recreating metadata JSON for journalists_questions dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T13:27:53Z","updated_at":"2021-08-03T09:25:34Z","closed_at":"2021-08-03T09:25:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2744","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2744.patch"},"body":"Close #2743.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2743\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2743","id":958119251,"node_id":"MDU6SXNzdWU5NTgxMTkyNTE=","number":2743,"title":"Dataset JSON is incorrect","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-08-02T13:01:26Z","updated_at":"2021-08-03T10:06:57Z","closed_at":"2021-08-03T09:25:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nThe JSON file generated for https:\/\/github.com\/huggingface\/datasets\/blob\/573f3d35081cee239d1b962878206e9abe6cde91\/datasets\/journalists_questions\/journalists_questions.py is https:\/\/github.com\/huggingface\/datasets\/blob\/573f3d35081cee239d1b962878206e9abe6cde91\/datasets\/journalists_questions\/dataset_infos.json.\r\n\r\nThe only config should be `plain_text`, but the first key in the JSON is `journalists_questions` (the dataset id) instead.\r\n\r\n```json\r\n{\r\n \"journalists_questions\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by journalists.\\n\",\r\n ...\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nLook at the files.\r\n\r\n## Expected results\r\n\r\nThe first key should be `plain_text`:\r\n\r\n```json\r\n{\r\n \"plain_text\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by 
journalists.\\n\",\r\n ...\r\n```\r\n\r\n## Actual results\r\n\r\n```json\r\n{\r\n \"journalists_questions\": {\r\n \"description\": \"The journalists_questions corpus (version 1.0) is a collection of 10K human-written Arabic\\ntweets manually labeled for question identification over Arabic tweets posted by journalists.\\n\",\r\n ...\r\n```\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2742\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2742","id":958114064,"node_id":"MDU6SXNzdWU5NTgxMTQwNjQ=","number":2742,"title":"Improve detection of streamable file types","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-08-02T12:55:09Z","updated_at":"2021-08-02T16:35:49Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\n\r\n```python\r\nfrom datasets import load_dataset_builder\r\nfrom datasets.utils.streaming_download_manager import StreamingDownloadManager\r\nbuilder = load_dataset_builder(\"journalists_questions\", name=\"plain_text\")\r\nbuilder._split_generators(StreamingDownloadManager(base_path=builder.base_path))\r\n```\r\n\r\nraises\r\n\r\n```\r\nNotImplementedError: Extraction protocol for file at https:\/\/drive.google.com\/uc?export=download&id=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U is not implemented yet\r\n```\r\n\r\nBut the file at https:\/\/drive.google.com\/uc?export=download&id=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U is a text file and it can be streamed:\r\n\r\n```bash\r\ncurl --header \"Range: bytes=0-100\" -L https:\/\/drive.google.com\/uc\\?export\\=download\\&id\\=1CBrh-9OrSpKmPQBxTK_ji6mq6WTN_U9U\r\n506938088174940160 yes 1\r\n302221719412830209 yes 1\r\n289761704907268096 yes 1\r\n513820885032378369 yes %\r\n```\r\n\r\nYet, it's wrongly categorized as a file type that cannot be streamed because the test is currently based on 1. the presence of a file extension at the end of the URL (here: no extension), and 2. the inclusion of this extension in a list of supported formats.\r\n\r\n**Describe the solution you'd like**\r\n\r\nIn the case of an URL (instead of a local path), ask for the MIME type, and decide on that value? Note that it would not work in that case, because the value of `content_type` is `text\/html; charset=UTF-8`.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nAdd a variable in the dataset script to set the data format by hand.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2741\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2741","id":957979559,"node_id":"MDU6SXNzdWU5NTc5Nzk1NTk=","number":2741,"title":"Add Hypersim dataset","user":{"login":"osanseviero","id":7246357,"node_id":"MDQ6VXNlcjcyNDYzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7246357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/osanseviero","html_url":"https:\/\/github.com\/osanseviero","followers_url":"https:\/\/api.github.com\/users\/osanseviero\/followers","following_url":"https:\/\/api.github.com\/users\/osanseviero\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/osanseviero\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/osanseviero\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/osanseviero\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/osanseviero\/orgs","repos_url":"https:\/\/api.github.com\/users\/osanseviero\/repos","events_url":"https:\/\/api.github.com\/users\/osanseviero\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/osanseviero\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T10:06:50Z","updated_at":"2021-08-02T10:06:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Hypersim\r\n- **Description:** photorealistic synthetic dataset for holistic indoor scene understanding\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/github.com\/apple\/ml-hypersim\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2740\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740","id":957911035,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxMjY0NTI3","number":2740,"title":"Update release instructions","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T08:46:00Z","updated_at":"2021-08-02T14:39:56Z","closed_at":"2021-08-02T14:39:56Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2740","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2740.patch"},"body":"Update release instructions.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2739\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739","id":957751260,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAxMTI0ODQ3","number":2739,"title":"Pass tokenize to sacrebleu only if explicitly passed by user","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-02T05:09:05Z","updated_at":"2021-08-03T04:23:37Z","closed_at":"2021-08-03T04:23:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2739","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2739.patch"},"body":"Next `sacrebleu` release (v2.0.0) will remove `sacrebleu.DEFAULT_TOKENIZER`: https:\/\/github.com\/mjpost\/sacrebleu\/pull\/152\/files#diff-2553a315bb1f7e68c9c1b00d56eaeb74f5205aeb3a189bc3e527b122c6078795L17-R15\r\n\r\nThis PR passes `tokenize` to `sacrebleu` only if explicitly passed by the user, otherwise it will not pass it (and `sacrebleu` will use its default, no matter where it is and how it is called).\r\n\r\nClose: #2737.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2738\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738","id":957517746,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwOTI5NzA4","number":2738,"title":"Sunbird AI Ugandan low resource language 
dataset","user":{"login":"ak3ra","id":12105163,"node_id":"MDQ6VXNlcjEyMTA1MTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12105163?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ak3ra","html_url":"https:\/\/github.com\/ak3ra","followers_url":"https:\/\/api.github.com\/users\/ak3ra\/followers","following_url":"https:\/\/api.github.com\/users\/ak3ra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ak3ra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ak3ra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ak3ra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ak3ra\/orgs","repos_url":"https:\/\/api.github.com\/users\/ak3ra\/repos","events_url":"https:\/\/api.github.com\/users\/ak3ra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ak3ra\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-08-01T15:18:00Z","updated_at":"2021-08-02T01:37:56Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2738","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2738.patch"},"body":"Multi-way parallel text corpus of 5 key Ugandan languages for the task of machine translation. ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2737\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2737","id":957124881,"node_id":"MDU6SXNzdWU5NTcxMjQ4ODE=","number":2737,"title":"SacreBLEU update","user":{"login":"devrimcavusoglu","id":46989091,"node_id":"MDQ6VXNlcjQ2OTg5MDkx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46989091?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/devrimcavusoglu","html_url":"https:\/\/github.com\/devrimcavusoglu","followers_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/followers","following_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/repos","events_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/devrimcavusoglu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-30T23:53:08Z","updated_at":"2021-08-03T04:23:37Z","closed_at":"2021-08-03T04:23:37Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"With the latest release of [sacrebleu](https:\/\/github.com\/mjpost\/sacrebleu), `datasets.metrics.sacrebleu` is broken, and getting error.\r\n\r\n AttributeError: module 'sacrebleu' has no attribute 'DEFAULT_TOKENIZER'\r\n\r\nthis happens since in new version of sacrebleu there is no `DEFAULT_TOKENIZER`, but sacrebleu.py tries to import it anyways. This can be fixed currently with fixing `sacrebleu==1.5.0`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nsacrebleu= datasets.load_metric('sacrebleu')\r\npredictions = [\"It is a guide to action which ensures that the military always obeys the commands of the party\"]\r\nreferences = [\"It is a guide to action that ensures that the military will forever heed Party commands\"]\r\nresults = sacrebleu.compute(predictions=predictions, references=references)\r\nprint(results)\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.11.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: Python 3.8.0\r\n- PyArrow version: 5.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2736\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2736","id":956895199,"node_id":"MDU6SXNzdWU5NTY4OTUxOTk=","number":2736,"title":"Add Microsoft Building Footprints dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-30T16:17:08Z","updated_at":"2021-07-31T05:02:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## 
Adding a Dataset\r\n- **Name:** Microsoft Building Footprints\r\n- **Description:** With the goal to increase the coverage of building footprint data available as open data for OpenStreetMap and humanitarian efforts, we have released millions of building footprints as open data available to download free of charge.\r\n- **Paper:** *link to the dataset paper if available*\r\n- **Data:** https:\/\/www.microsoft.com\/en-us\/maps\/building-footprints\r\n- **Motivation:** this can be a useful dataset for researchers working on climate change adaptation, urban studies, geography, etc.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nReported by: @sashavor","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2735\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2735","id":956889365,"node_id":"MDU6SXNzdWU5NTY4ODkzNjU=","number":2735,"title":"Add Open Buildings dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T16:08:39Z","updated_at":"2021-07-31T05:01:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Open Buildings\r\n- **Description:** A dataset of building footprints to support social good applications.\r\n\r\n Building footprints are useful for a range of important applications, from population estimation, urban planning and humanitarian response, to environmental and climate science. This large-scale open dataset contains the outlines of buildings derived from high-resolution satellite imagery in order to support these types of uses. 
The project being based in Ghana, the current focus is on the continent of Africa.\r\n\r\n See: \"Mapping Africa's Buildings with Satellite Imagery\" https:\/\/ai.googleblog.com\/2021\/07\/mapping-africas-buildings-with.html\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2107.12283\r\n- **Data:** https:\/\/sites.research.google\/open-buildings\/\r\n- **Motivation:** *what are some good reasons to have this dataset*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nReported by: @osanseviero ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2734\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734","id":956844874,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMzc4NjI4","number":2734,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T15:22:51Z","updated_at":"2021-07-30T15:47:58Z","closed_at":"2021-07-30T15:47:58Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2734","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2734.patch"},"body":"Update BibTeX entry.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2733\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733","id":956725476,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMjc1NDMy","number":2733,"title":"Add missing parquet 
known extension","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-30T13:01:20Z","updated_at":"2021-07-30T13:24:31Z","closed_at":"2021-07-30T13:24:30Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2733","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2733.patch"},"body":"This code was failing because the parquet extension wasn't recognized:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nbase_url = \"https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/wikipedia\/20200501.en\/1.0.0\/\"\r\ndata_files = {\"train\": base_url + \"wikipedia-train.parquet\"}\r\nwiki = load_dataset(\"parquet\", data_files=data_files, split=\"train\", streaming=True)\r\n```\r\n\r\nIt raises\r\n```python\r\nNotImplementedError: Extraction protocol for file at https:\/\/storage.googleapis.com\/huggingface-nlp\/cache\/datasets\/wikipedia\/20200501.en\/1.0.0\/wikipedia-train.parquet is not implemented yet\r\n```\r\n\r\nI added `parquet` to the list of known extensions\r\n\r\nEDIT: added pickle, conllu, xml extensions as well","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2732\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732","id":956676360,"node_id":"MDExOlB1bGxSZXF1ZXN0NzAwMjMzMzQy","number":2732,"title":"Updated TTC4900 
Dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-30T11:52:14Z","updated_at":"2021-07-30T16:00:51Z","closed_at":"2021-07-30T15:58:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2732","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2732.patch"},"body":"- The source address of the TTC4900 dataset of [@savasy](https:\/\/github.com\/savasy) has been updated for direct download.\r\n- Updated readme.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2731\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731","id":956087452,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5NzQwMjg5","number":2731,"title":"First draft of a method to auto-convert our datasets to TF 
datasets!","user":{"login":"Rocketknight1","id":12866554,"node_id":"MDQ6VXNlcjEyODY2NTU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12866554?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Rocketknight1","html_url":"https:\/\/github.com\/Rocketknight1","followers_url":"https:\/\/api.github.com\/users\/Rocketknight1\/followers","following_url":"https:\/\/api.github.com\/users\/Rocketknight1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Rocketknight1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Rocketknight1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Rocketknight1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Rocketknight1\/orgs","repos_url":"https:\/\/api.github.com\/users\/Rocketknight1\/repos","events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Rocketknight1\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-29T18:10:25Z","updated_at":"2021-08-05T16:50:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2731","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2731.patch"},"body":"Oh my **god** do not merge this yet, it's just a draft.\r\n\r\nI've added a method (via a mixin) to the `arrow_dataset.Dataset` class that automatically converts our Dataset classes to TF Dataset classes ready for training. It hopefully has most of the features we want, including streaming from disk (no need to load the whole dataset in memory!), correct shuffling, variable-length batches to reduce compute, and correct support for unusual padding. It achieves that by calling the tokenizer `pad` method in the middle of a TF compute graph via a very hacky call to `tf.py_function`, which is heretical but seems to work.\r\n\r\nA number of issues need to be resolved before it's ready to merge, though:\r\n\r\n1) Is a MixIn the right way to do this? Do other classes besides `arrow_dataset.Dataset` need this method too?\r\n2) Needs an argument to support constant-length batches for TPU training - this is easy to add and I'll do it soon.\r\n3) Needs the user to supply the list of columns to drop from the arrow `Dataset`. 
Is there some automatic way to get the columns we want, or see which columns were added by the tokenizer?\r\n4) Assumes the label column is always present and always called \"label\" - this is probably not great, but I'm not sure what the 'correct' thing to do here is.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2730\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2730","id":955987834,"node_id":"MDU6SXNzdWU5NTU5ODc4MzQ=","number":2730,"title":"Update CommonVoice with new release","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-29T15:59:59Z","updated_at":"2021-08-07T16:19:19Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CommonVoice mid-2021 release\r\n- **Description:** more data in CommonVoice: Languages that have increased the most by percentage are Thai (almost 20x growth, from 12 hours to 250 hours), Luganda (almost 9x growth, from 8 to 80), Esperanto (7x growth, from 100 to 840), and Tamil (almost 8x, from 24 to 220).\r\n- **Paper:** https:\/\/discourse.mozilla.org\/t\/common-voice-2021-mid-year-dataset-release\/83812\r\n- **Data:** https:\/\/commonvoice.mozilla.org\/en\/datasets\r\n- **Motivation:** More data and more varied. 
I think we just need to add configs in the existing dataset script.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2729\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729","id":955920489,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5NTk5MjA4","number":2729,"title":"Fix IndexError while loading Arabic Billion Words dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-29T14:47:02Z","updated_at":"2021-07-30T13:03:55Z","closed_at":"2021-07-30T13:03:55Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2729","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2729.patch"},"body":"Catch `IndexError` and ignore that record.\r\n\r\nClose #2727.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2728\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2728","id":955892970,"node_id":"MDU6SXNzdWU5NTU4OTI5NzA=","number":2728,"title":"Concurrent use of same dataset (already 
downloaded)","user":{"login":"PierreColombo","id":22492839,"node_id":"MDQ6VXNlcjIyNDkyODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22492839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PierreColombo","html_url":"https:\/\/github.com\/PierreColombo","followers_url":"https:\/\/api.github.com\/users\/PierreColombo\/followers","following_url":"https:\/\/api.github.com\/users\/PierreColombo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PierreColombo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PierreColombo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PierreColombo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PierreColombo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PierreColombo\/repos","events_url":"https:\/\/api.github.com\/users\/PierreColombo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PierreColombo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-29T14:18:38Z","updated_at":"2021-08-02T07:25:57Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen launching several jobs at the same time loading the same dataset trigger some errors see (last comments).\r\n\r\n## Steps to reproduce the bug\r\nexport HF_DATASETS_CACHE=\/gpfswork\/rech\/toto\/datasets\r\nfor MODEL in \"bert-base-uncased\" \"roberta-base\" \"distilbert-base-cased\"; do # \"bert-base-uncased\" \"bert-large-cased\" \"roberta-large\" \"albert-base-v1\" \"albert-large-v1\"; do\r\n for TASK_NAME in \"mrpc\" \"rte\" 'imdb' \"paws\" \"mnli\"; do\r\n export OUTPUT_DIR=${MODEL}_${TASK_NAME}\r\n sbatch --job-name=${OUTPUT_DIR} \\\r\n --gres=gpu:1 \\\r\n --no-requeue \\\r\n --cpus-per-task=10 \\\r\n --hint=nomultithread \\\r\n --time=1:00:00 \\\r\n --output=jobinfo\/${OUTPUT_DIR}_%j.out \\\r\n --error=jobinfo\/${OUTPUT_DIR}_%j.err \\\r\n --qos=qos_gpu-t4 \\\r\n --wrap=\"module purge; module load pytorch-gpu\/py3\/1.7.0 ; export HF_DATASETS_OFFLINE=1; export HF_DATASETS_CACHE=\/gpfswork\/rech\/toto\/datasets; python compute_measures.py --seed=$SEED --saving_path=results --batch_size=$BATCH_SIZE --task_name=$TASK_NAME --model_name=\/gpfswork\/rech\/toto\/transformers_models\/$MODEL\"\r\n\r\n done\r\ndone\r\n\r\n\r\n\r\n```python\r\n# Sample code to reproduce the bug\r\n dataset_train = load_dataset('imdb', split='train', download_mode=\"reuse_cache_if_exists\")\r\n dataset_train = dataset_train.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True).select(list(range(args.filter)))\r\n\r\n dataset_val = load_dataset('imdb', split='train', download_mode=\"reuse_cache_if_exists\")\r\n dataset_val = dataset_val.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True).select(list(range(args.filter, args.filter + 5000)))\r\n\r\n dataset_test = load_dataset('imdb', split='test', download_mode=\"reuse_cache_if_exists\")\r\n dataset_test = dataset_test.map(lambda e: tokenizer(e['text'], truncation=True, padding='max_length'),\r\n batched=True)\r\n```\r\n\r\n## Expected 
results\r\nI believe I am doing something wrong with the objects. \r\n\r\n## Actual results\r\nTraceback (most recent call last):\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 983, in _prepare_split\r\n check_duplicates=True,\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 192, in __init__\r\n self.stream = pa.OSFile(self._path, \"wb\")\r\n File \"pyarrow\/io.pxi\", line 829, in pyarrow.lib.OSFile.__cinit__\r\n File \"pyarrow\/io.pxi\", line 844, in pyarrow.lib.OSFile._open_writable\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 97, in pyarrow.lib.check_status\r\nFileNotFoundError: [Errno 2] Failed to open local file '\/gpfswork\/rech\/tts\/unm25jp\/datasets\/paws\/labeled_final\/1.1.0\/09d8fae989bb569009a8f5b879ccf2924d3e5cd55bfe2e89e6dab1c0b50ecd34.incomplete\/paws-test.arrow'. Detail: [errno 2] No such file or directory\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"compute_measures.py\", line 181, in \r\n train_loader, val_loader, test_loader = get_dataloader(args)\r\n File \"\/gpfsdswork\/projects\/rech\/toto\/intRAOcular\/dataset_utils.py\", line 69, in get_dataloader\r\n dataset_train = load_dataset('paws', \"labeled_final\", split='train', download_mode=\"reuse_cache_if_exists\")\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/gpfslocalsup\/pub\/anaconda-py3\/2020.02\/envs\/pytorch-gpu-1.7.0\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 658, in _download_and_prepare\r\n + str(e)\r\nOSError: Cannot find data file.\r\nOriginal error:\r\n[Errno 2] Failed to open local file '\/gpfswork\/rech\/toto\/datasets\/paws\/labeled_final\/1.1.0\/09d8fae989bb569009a8f5b879ccf2924d3e5cd55bfe2e89e6dab1c0b50ecd34.incomplete\/paws-test.arrow'. 
Detail: [errno 2] No such file or directory\r\n\r\n## Environment info\r\n\r\n- `datasets` version: datasets==1.8.0\r\n- Platform: linux (jeanzay)\r\n- Python version: pyarrow==2.0.0\r\n- PyArrow version: 3.7.8\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2727\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2727","id":955812149,"node_id":"MDU6SXNzdWU5NTU4MTIxNDk=","number":2727,"title":"Error in loading the Arabic Billion Words Corpus","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-29T12:53:09Z","updated_at":"2021-07-30T13:03:55Z","closed_at":"2021-07-30T13:03:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI get `IndexError: list index out of range` when trying to load the `Techreen` and `Almustaqbal` configs of the dataset.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nload_dataset(\"arabic_billion_words\", \"Techreen\")\r\nload_dataset(\"arabic_billion_words\", \"Almustaqbal\")\r\n```\r\n\r\n## Expected results\r\nThe datasets load succefully.\r\n\r\n## Actual results\r\n```python\r\n_extract_tags(self, sample, tag)\r\n 139 if len(out) > 0:\r\n 140 break\r\n--> 141 return out[0]\r\n 142 \r\n 143 def _clean_text(self, text):\r\n\r\nIndexError: list index out of range\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.2\r\n- Platform: Ubuntu 18.04.5 LTS\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2726\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726","id":955674388,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk5Mzg5MDk1","number":2726,"title":"Typo fix `tokenize_exemple`","user":{"login":"shabie","id":30535146,"node_id":"MDQ6VXNlcjMwNTM1MTQ2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30535146?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shabie","html_url":"https:\/\/github.com\/shabie","followers_url":"https:\/\/api.github.com\/users\/shabie\/followers","following_url":"https:\/\/api.github.com\/users\/shabie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shabie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shabie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shabie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shabie\/orgs","repos_url":"https:\/\/api.github.com\/users\/shabie\/repos","events_url":"https:\/\/api.github.com\/users\/shabie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shabie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-29T10:03:37Z","updated_at":"2021-07-29T12:00:25Z","closed_at":"2021-07-29T12:00:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2726","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2726.patch"},"body":"There is a small typo in the main README.md","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2725\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725","id":955020776,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4ODMwNjYw","number":2725,"title":"Pass use_auth_token to 
request_etags","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-28T16:13:29Z","updated_at":"2021-07-28T16:38:02Z","closed_at":"2021-07-28T16:38:02Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2725","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2725.patch"},"body":"Fix #2724.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2724\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2724","id":954919607,"node_id":"MDU6SXNzdWU5NTQ5MTk2MDc=","number":2724,"title":"404 Error when loading remote data files from private 
repo","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-07-28T14:24:23Z","updated_at":"2021-07-29T04:58:49Z","cl
osed_at":"2021-07-28T16:38:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen loading remote data files from a private repo, a 404 error is raised.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nurl = hf_hub_url(\"lewtun\/asr-preds-test\", \"preds.jsonl\", repo_type=\"dataset\")\r\ndset = load_dataset(\"json\", data_files=url, use_auth_token=True)\r\n# HTTPError: 404 Client Error: Not Found for url: https:\/\/huggingface.co\/datasets\/lewtun\/asr-preds-test\/resolve\/main\/preds.jsonl\r\n```\r\n\r\n## Expected results\r\nLoad dataset.\r\n\r\n## Actual results\r\n404 Error.\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2723\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723","id":954864104,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4Njk0NDMw","number":2723,"title":"Fix en subset by modifying dataset_info with correct validation infos","user":{"login":"thomasw21","id":24695242,"node_id":"MDQ6VXNlcjI0Njk1MjQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24695242?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomasw21","html_url":"https:\/\/github.com\/thomasw21","followers_url":"https:\/\/api.github.com\/users\/thomasw21\/followers","following_url":"https:\/\/api.github.com\/users\/thomasw21\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomasw21\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomasw21\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomasw21\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomasw21\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomasw21\/repos","events_url":"https:\/\/api.github.com\/users\/thomasw21\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomasw21\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-28T13:36:19Z","updated_at":"2021-07-28T15:22:23Z","closed_at":"2021-07-28T15:22:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2723","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2723.patch"},"body":"- Related to: #2682 \r\n\r\nWe correct the values of `en` subset concerning the expected validation values (both `num_bytes` and `num_examples`.\r\n\r\nInstead of having:\r\n\r\n`{\"name\": \"validation\", \"num_bytes\": 828589180707, \"num_examples\": 364868892, \"dataset_name\": \"c4\"}`\r\n\r\nWe replace with correct values:\r\n\r\n`{\"name\": \"validation\", \"num_bytes\": 825767266, \"num_examples\": 364608, \"dataset_name\": \"c4\"}`\r\n\r\nThere are still issues with validation with other subsets, but I can't download all the files, unzip to check for the correct number of bytes. 
(If you have a fast way to obtain those values for other subsets, I can do this in this PR ... otherwise I can't spend those resources)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2722\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2722","id":954446053,"node_id":"MDU6SXNzdWU5NTQ0NDYwNTM=","number":2722,"title":"Missing cache file","user":{"login":"PosoSAgapo","id":33200481,"node_id":"MDQ6VXNlcjMzMjAwNDgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33200481?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PosoSAgapo","html_url":"https:\/\/github.com\/PosoSAgapo","followers_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/followers","following_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/repos","events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PosoSAgapo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-28T03:52:07Z","updated_at":"2021-07-28T09:07:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Strangely missing cache file after I restart my program again.\r\n\r\n`glue_dataset = datasets.load_dataset('glue', 'sst2')`\r\n\r\n`FileNotFoundError: [Errno 2] No such file or directory: \/Users\/chris\/.cache\/huggingface\/datasets\/glue\/sst2\/1.0.0\/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96d6053ad\/dataset_info.json'`\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2721\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721","id":954238230,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk4MTY0Njg3","number":2721,"title":"Deal with the bad check in 
test_load.py","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-27T20:23:23Z","updated_at":"2021-07-28T09:58:34Z","closed_at":"2021-07-28T08:53:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2721","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2721.patch"},"body":"This PR removes a check that's been added in #2684. My intention with this check was to capture an URL in the error message, but instead, it captures a substring of the previous regex match in the test function. 
Another option would be to replace this check with:\r\n```python\r\nm_paths = re.findall(r\"\\S*_dummy\/_dummy.py\\b\", str(exc_info.value)) # on Linux this will match an URL as well as a local_path due to different os.sep, so take the last element (an URL always comes last in the list)\r\nassert len(m_paths) > 0 and is_remote_url(m_paths[-1]) # is_remote_url comes from datasets.utils.file_utils\r\n```\r\n\r\n@lhoestq Let me know which one of these two approaches (delete or replace) do you prefer?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2720\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720","id":954024426,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3OTgxNjMx","number":2720,"title":"fix: \ud83d\udc1b fix two typos","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-27T15:50:17Z","updated_at":"2021-07-27T18:38:17Z","closed_at":"2021-07-27T18:38:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2720","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2720.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2719\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2719","id":953932416,"node_id":"MDU6SXNzdWU5NTM5MzI0MTY=","number":2719,"title":"Use ETag in streaming mode to detect resource 
updates","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-27T14:17:09Z","updated_at":"2021-07-27T14:17:09Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nI want to cache data I generate from processing a dataset I've loaded in streaming mode, but I've currently no way to know if the remote data has been updated or not, thus I don't know when to invalidate my cache.\r\n\r\n**Describe the solution you'd like**\r\n\r\nTake the ETag of the data files into account and provide it (directly or through a hash) to give a signal that I can invalidate my cache.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nNone\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2718\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718","id":953360663,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3NDE0NTQy","number":2718,"title":"Docs 
structure","user":{"login":"stevhliu","id":59462357,"node_id":"MDQ6VXNlcjU5NDYyMzU3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/59462357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stevhliu","html_url":"https:\/\/github.com\/stevhliu","followers_url":"https:\/\/api.github.com\/users\/stevhliu\/followers","following_url":"https:\/\/api.github.com\/users\/stevhliu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stevhliu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stevhliu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stevhliu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stevhliu\/orgs","repos_url":"https:\/\/api.github.com\/users\/stevhliu\/repos","events_url":"https:\/\/api.github.com\/users\/stevhliu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stevhliu\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T23:15:13Z","updated_at":"2021-08-11T00:17:23Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2718","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2718.patch"},"body":"Organize Datasets documentation into four documentation types to improve clarity and discoverability of content.\r\n\r\n- [x] Tutorials\r\n- [x] How-to guides\r\n- [ ] Conceptual guides (mostly done, need Quentin's help with a few sections)\r\n- [x] Reference\r\n- [x] Review","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2717\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717","id":952979976,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk3MDkzNDEx","number":2717,"title":"Fix shuffle on IterableDataset that disables batching in case any functions were 
mapped","user":{"login":"amankhandelia","id":7098967,"node_id":"MDQ6VXNlcjcwOTg5Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7098967?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amankhandelia","html_url":"https:\/\/github.com\/amankhandelia","followers_url":"https:\/\/api.github.com\/users\/amankhandelia\/followers","following_url":"https:\/\/api.github.com\/users\/amankhandelia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amankhandelia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amankhandelia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amankhandelia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amankhandelia\/orgs","repos_url":"https:\/\/api.github.com\/users\/amankhandelia\/repos","events_url":"https:\/\/api.github.com\/users\/amankhandelia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amankhandelia\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T14:42:22Z","updated_at":"2021-07-26T18:04:14Z","closed_at":"2021-07-26T16:30:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2717","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2717.patch"},"body":"Made a very minor change to fix the issue#2716. Added the missing argument in the constructor call.\r\n\r\nAs discussed in the bug report, the change is made to prevent the `shuffle` method call from resetting the value of `batched` attribute in `MappedExamplesIterable`\r\n\r\nFix #2716.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2716\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2716","id":952902778,"node_id":"MDU6SXNzdWU5NTI5MDI3Nzg=","number":2716,"title":"Calling shuffle on IterableDataset will disable batching in case any functions were 
mapped","user":{"login":"amankhandelia","id":7098967,"node_id":"MDQ6VXNlcjcwOTg5Njc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7098967?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amankhandelia","html_url":"https:\/\/github.com\/amankhandelia","followers_url":"https:\/\/api.github.com\/users\/amankhandelia\/followers","following_url":"https:\/\/api.github.com\/users\/amankhandelia\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amankhandelia\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amankhandelia\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amankhandelia\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amankhandelia\/orgs","repos_url":"https:\/\/api.github.com\/users\/amankhandelia\/repos","events_url":"https:\/\/api.github.com\/users\/amankhandelia\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amankhandelia\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-26T13:24:59Z","updated_at":"2021-07-26T18:04:43Z","closed_at":"2021-07-26T18:04:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"When using dataset in streaming mode, if one applies `shuffle` method on the dataset and `map` method for which `batched=True` than the batching operation will not happen, instead `batched` will be set to `False`\r\n\r\nI did RCA on the dataset codebase, the problem is emerging from [this line of code](https:\/\/github.com\/huggingface\/datasets\/blob\/d25a0bf94d9f9a9aa6cabdf5b450b9c327d19729\/src\/datasets\/iterable_dataset.py#L197) here as it is\r\n`self.ex_iterable.shuffle_data_sources(seed), function=self.function, batch_size=self.batch_size`, as one can see it is missing batched argument, which means that the iterator fallsback to default constructor value, which in this case is `False`.\r\nTo remedy the problem we can change this line to\r\n`self.ex_iterable.shuffle_data_sources(seed), function=self.function, batched=self.batched, batch_size=self.batch_size`\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2715\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715","id":952845229,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2OTc5MjQ1","number":2715,"title":"Update PAN-X data URL in XTREME 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-26T12:21:17Z","updated_at":"2021-07-26T13:27:59Z","closed_at":"2021-07-26T13:27:59Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2715","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2715.patch"},"body":"Related to #2710, #2691.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2714\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2714","id":952580820,"node_id":"MDU6SXNzdWU5NTI1ODA4MjA=","number":2714,"title":"add more precise information for size","user":{"login":"pennyl67","id":1493902,"node_id":"MDQ6VXNlcjE0OTM5MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1493902?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pennyl67","html_url":"https:\/\/github.com\/pennyl67","followers_url":"https:\/\/api.github.com\/users\/pennyl67\/followers","following_url":"https:\/\/api.github.com\/users\/pennyl67\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pennyl67\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pennyl67\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pennyl67\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pennyl67\/orgs","repos_url":"https:\/\/api.github.com\/users\/pennyl67\/repos","events_url":"https:\/\/api.github.com\/users\/pennyl67\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pennyl67\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-26T07:11:03Z","updated_at":"2021-07-26T09:16:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"For the import into ELG, we would like a more precise description of the size of the dataset, instead of the current size categories. The size can be expressed in bytes, or any other preferred size unit. As suggested in the slack channel, perhaps this could be computed with a regex for existing datasets.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2713\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713","id":952515256,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2Njk3MzU0","number":2713,"title":"Enumerate all ner_tags values in WNUT 17 dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-26T05:22:16Z","updated_at":"2021-07-26T09:30:55Z","closed_at":"2021-07-26T09:30:55Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2713","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2713.patch"},"body":"This PR does:\r\n- Enumerate all ner_tags in dataset card Data Fields section\r\n- Add all metadata tags to dataset card\r\n\r\nClose #2709.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2710\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710","id":951723326,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk2MDYyNjAy","number":2710,"title":"Update WikiANN data URL","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-23T16:29:21Z","updated_at":"2021-07-26T09:34:23Z","closed_at":"2021-07-26T09:34:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2710","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2710.patch"},"body":"WikiANN data source URL is no longer accessible: 404 error from Dropbox.\r\n\r\nWe have decided to host it at Hugging Face. 
This PR updates the data source URL, the metadata JSON file and the dataset card.\r\n\r\nClose #2691.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2709\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2709","id":951534757,"node_id":"MDU6SXNzdWU5NTE1MzQ3NTc=","number":2709,"title":"Missing documentation for wnut_17 (ner_tags)","user":{"login":"maxpel","id":31095360,"node_id":"MDQ6VXNlcjMxMDk1MzYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31095360?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/maxpel","html_url":"https:\/\/github.com\/maxpel","followers_url":"https:\/\/api.github.com\/users\/maxpel\/followers","following_url":"https:\/\/api.github.com\/users\/maxpel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/maxpel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/maxpel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/maxpel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/maxpel\/orgs","repos_url":"https:\/\/api.github.com\/users\/maxpel\/repos","events_url":"https:\/\/api.github.com\/users\/maxpel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/maxpel\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-23T12:25:32Z","updated_at":"2021-07-26T09:30:55Z","closed_at":"2021-07-26T09:30:55Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"On the info page of the wnut_17 data set (https:\/\/huggingface.co\/datasets\/wnut_17), the model output of ner-tags is only documented for these 5 cases:\r\n\r\n`ner_tags: a list of classification labels, with possible values including O (0), B-corporation (1), I-corporation (2), B-creative-work (3), I-creative-work (4).`\r\n\r\nI trained a model with the data and it gives me 13 classes:\r\n\r\n```\r\n\"id2label\": {\r\n \"0\": 0,\r\n \"1\": 1,\r\n \"2\": 2,\r\n \"3\": 3,\r\n \"4\": 4,\r\n \"5\": 5,\r\n \"6\": 6,\r\n \"7\": 7,\r\n \"8\": 8,\r\n \"9\": 9,\r\n \"10\": 10,\r\n \"11\": 11,\r\n \"12\": 12\r\n }\r\n\r\n \"label2id\": {\r\n \"0\": 0,\r\n \"1\": 1,\r\n \"10\": 10,\r\n \"11\": 11,\r\n \"12\": 12,\r\n \"2\": 2,\r\n \"3\": 3,\r\n \"4\": 4,\r\n \"5\": 5,\r\n \"6\": 6,\r\n \"7\": 7,\r\n \"8\": 8,\r\n \"9\": 9\r\n }\r\n```\r\nThe paper (https:\/\/www.aclweb.org\/anthology\/W17-4418.pdf) explains those 6 categories, but the ordering does not match:\r\n\r\n```\r\n1. person\r\n2. location (including GPE, facility)\r\n3. corporation\r\n4. product (tangible goods, or well-defined\r\nservices)\r\n5. 
creative-work (song, movie, book and\r\nso on)\r\n6. group (subsuming music band, sports team,\r\nand non-corporate organisations)\r\n```\r\nI would be very helpful for me, if somebody could clarify the model ouputs and explain the \"B-\" and \"I-\" prefixes to me.\r\n\r\nReally great work with that and the other packages, I couldn't believe that training the model with that data was basically a one-liner!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2708\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2708","id":951092660,"node_id":"MDU6SXNzdWU5NTEwOTI2NjA=","number":2708,"title":"QASC: incomplete training set ","user":{"login":"danyaljj","id":2441454,"node_id":"MDQ6VXNlcjI0NDE0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2441454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danyaljj","html_url":"https:\/\/github.com\/danyaljj","followers_url":"https:\/\/api.github.com\/users\/danyaljj\/followers","following_url":"https:\/\/api.github.com\/users\/danyaljj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danyaljj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danyaljj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danyaljj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danyaljj\/orgs","repos_url":"https:\/\/api.github.com\/users\/danyaljj\/repos","events_url":"https:\/\/api.github.com\/users\/danyaljj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danyaljj\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-22T21:59:44Z","updated_at":"2021-07-23T13:30:07Z","closed_at":"2021-07-23T13:30:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe training instances are not loaded properly. \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"qasc\", script_version='1.10.2')\r\n \r\ndef load_instances(split): \r\n instances = dataset[split]\r\n print(f\"split: {split} - size: {len(instances)}\")\r\n for x in instances:\r\n print(json.dumps(x))\r\n\r\n\r\nload_instances('test')\r\nload_instances('validation')\r\nload_instances('train')\r\n```\r\n\r\n## results\r\nFor test and validation, we can see the examples in the output (which is good!): \r\n```\r\nsplit: test - size: 920\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Anthax\", \"under water\", \"uterus\", \"wombs\", \"two\", \"moles\", \"live\", \"embryo\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"What type of birth do therian mammals have? 
(A) Anthax (B) under water (C) uterus (D) wombs (E) two (F) moles (G) live (H) embryo\", \"id\": \"3C44YUNSI1OBFBB8D36GODNOZN9DPA\", \"question\": \"What type of birth do therian mammals have?\"}\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Corvidae\", \"arthropods\", \"birds\", \"backbones\", \"keratin\", \"Jurassic\", \"front paws\", \"Parakeets.\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"By what time had mouse-sized viviparous mammals evolved? (A) Corvidae (B) arthropods (C) birds (D) backbones (E) keratin (F) Jurassic (G) front paws (H) Parakeets.\", \"id\": \"3B1NLC6UGZVERVLZFT7OUYQLD1SGPZ\", \"question\": \"By what time had mouse-sized viviparous mammals evolved?\"}\r\n{\"answerKey\": \"\", \"choices\": {\"label\": [\"A\", \"B\", \"C\", \"D\", \"E\", \"F\", \"G\", \"H\"], \"text\": [\"Reduced friction\", \"causes infection\", \"vital to a good life\", \"prevents water loss\", \"camouflage from consumers\", \"Protection against predators\", \"spur the growth of the plant\", \"a smooth surface\"]}, \"combinedfact\": \"\", \"fact1\": \"\", \"fact2\": \"\", \"formatted_question\": \"What does a plant's skin do? (A) Reduced friction (B) causes infection (C) vital to a good life (D) prevents water loss (E) camouflage from consumers (F) Protection against predators (G) spur the growth of the plant (H) a smooth surface\", \"id\": \"3QRYMNZ7FYGITFVSJET3PS0F4S0NT9\", \"question\": \"What does a plant's skin do?\"}\r\n...\r\n```\r\nHowever, only a few instances are loaded for the training split, which is not correct. \r\n\r\n## Environment info\r\n- `datasets` version: '1.10.2' \r\n- Platform: MaxOS \r\n- Python version:3.7\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2707\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2707","id":950812945,"node_id":"MDU6SXNzdWU5NTA4MTI5NDU=","number":2707,"title":"404 Not Found Error when loading LAMA 
dataset","user":{"login":"dwil2444","id":26467159,"node_id":"MDQ6VXNlcjI2NDY3MTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26467159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dwil2444","html_url":"https:\/\/github.com\/dwil2444","followers_url":"https:\/\/api.github.com\/users\/dwil2444\/followers","following_url":"https:\/\/api.github.com\/users\/dwil2444\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dwil2444\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dwil2444\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dwil2444\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dwil2444\/orgs","repos_url":"https:\/\/api.github.com\/users\/dwil2444\/repos","events_url":"https:\/\/api.github.com\/users\/dwil2444\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dwil2444\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-22T15:52:33Z","updated_at":"2021-07-26T14:29:07Z","closed_at":"2021-07-26T14:29:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"The [LAMA](https:\/\/huggingface.co\/datasets\/viewer\/?dataset=lama) probing dataset is not available for download: \r\n\r\nSteps to Reproduce: \r\n\r\n1. `from datasets import load_dataset`\r\n2. `dataset = load_dataset('lama', 'trex')`. \r\n\r\n\r\nResults: \r\n`FileNotFoundError: Couldn't find file locally at lama\/lama.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/lama\/lama.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/lama\/lama.py`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2706\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706","id":950606561,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk1MTI3ODgz","number":2706,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T12:29:29Z","updated_at":"2021-07-22T12:43:00Z","closed_at":"2021-07-22T12:43:00Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2706","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2706.patch"},"body":"Update BibTeX entry.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2705\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2705","id":950488583,"node_id":"MDU6SXNzdWU5NTA0ODg1ODM=","number":2705,"title":"404 not found error on loading WIKIANN dataset","user":{"login":"ronbutan","id":39296659,"node_id":"MDQ6VXNlcjM5Mjk2NjU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39296659?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ronbutan","html_url":"https:\/\/github.com\/ronbutan","followers_url":"https:\/\/api.github.com\/users\/ronbutan\/followers","following_url":"https:\/\/api.github.com\/users\/ronbutan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ronbutan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ronbutan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ronbutan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ronbutan\/orgs","repos_url":"https:\/\/api.github.com\/users\/ronbutan\/repos","events_url":"https:\/\/api.github.com\/users\/ronbutan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ronbutan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T09:55:50Z","updated_at":"2021-07-23T08:07:32Z","closed_at":"2021-07-23T08:07:32Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nUnable to retreive wikiann English dataset\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import list_datasets, load_dataset, list_metrics, load_metric\r\nWIKIANN = load_dataset(\"wikiann\",\"en\")\r\n```\r\n\r\n## Expected results\r\nColab notebook should display successful download status\r\n\r\n## Actual results\r\nFileNotFoundError: Couldn't find file at https:\/\/www.dropbox.com\/s\/12h3qqog6q4bjve\/panx_dataset.tar?dl=1\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.1\r\n- Platform: Linux-5.4.104+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2704\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704","id":950483980,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk1MDIzMTEz","number":2704,"title":"Fix pick default config name message","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T09:49:43Z","updated_at":"2021-07-22T10:02:41Z","closed_at":"2021-07-22T10:02:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2704","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2704.patch"},"body":"The error message to tell which config name to load is not displayed. \r\n\r\nThis is because in the code it was considering the config kwargs to be non-empty, which is a special case for custom configs created on the fly. 
It appears after this change: https:\/\/github.com\/huggingface\/datasets\/pull\/2659\r\n\r\nI fixed that by making the config kwargs empty by default, even if default parameters are passed\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2703","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2703\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2703","id":950482284,"node_id":"MDU6SXNzdWU5NTA0ODIyODQ=","number":2703,"title":"Bad message when config name is missing","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizat
ions_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-22T09:47:23Z","updated_at":"2021-07-22T10:02:40Z","closed_at":"2021-07-22T10:02:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"When loading a dataset that have several configurations, we expect to see an error message if the user doesn't specify a config name.\r\n\r\nHowever in `datasets` 1.10.0 and 1.10.1 it doesn't show the right message:\r\n\r\n```python\r\nimport datasets\r\n\r\ndatasets.load_dataset(\"glue\")\r\n```\r\nraises\r\n```python\r\nAttributeError: 'BuilderConfig' object has no attribute 'text_features'\r\n```\r\ninstead of\r\n```python\r\nValueError: Config name is missing.\r\nPlease pick one among the available configs: ['cola', 'sst2', 'mrpc', 'qqp', 'stsb', 'mnli', 'mnli_mismatched', 'mnli_matched', 'qnli', 'rte', 'wnli', 'ax']\r\nExample of usage:\r\n `load_dataset('glue', 'cola')`\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2702\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702","id":950448159,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0OTkyOTc1","number":2702,"title":"Update BibTeX entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T09:04:39Z","updated_at":"2021-07-22T09:17:39Z","closed_at":"2021-07-22T09:17:38Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2702","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2702.patch"},"body":"Update BibTeX 
entry.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2701\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701","id":950422403,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0OTcxMzM3","number":2701,"title":"Fix download_mode docstrings","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-22T08:30:25Z","updated_at":"2021-07-22T09:33:31Z","closed_at":"2021-07-22T09:33:31Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2701","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2701.patch"},"body":"Fix `download_mode` docstrings.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2700\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2700","id":950276325,"node_id":"MDU6SXNzdWU5NTAyNzYzMjU=","number":2700,"title":"from datasets import Dataset is failing 
","user":{"login":"kswamy15","id":5582286,"node_id":"MDQ6VXNlcjU1ODIyODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5582286?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kswamy15","html_url":"https:\/\/github.com\/kswamy15","followers_url":"https:\/\/api.github.com\/users\/kswamy15\/followers","following_url":"https:\/\/api.github.com\/users\/kswamy15\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kswamy15\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kswamy15\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kswamy15\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kswamy15\/orgs","repos_url":"https:\/\/api.github.com\/users\/kswamy15\/repos","events_url":"https:\/\/api.github.com\/users\/kswamy15\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kswamy15\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T03:51:23Z","updated_at":"2021-07-22T07:23:45Z","closed_at":"2021-07-22T07:09:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nfrom datasets import Dataset\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in ()\r\n 25 import posixpath\r\n 26 import requests\r\n---> 27 from tqdm.contrib.concurrent import thread_map\r\n 28 \r\n 29 from .. 
import __version__, config, utils\r\n\r\nModuleNotFoundError: No module named 'tqdm.contrib.concurrent'\r\n\r\n---------------------------------------------------------------------------\r\nNOTE: If your import is failing due to a missing package, you can\r\nmanually install dependencies using either !pip or !apt.\r\n\r\nTo view examples of installing some common dependencies, click the\r\n\"Open Examples\" button below.\r\n---------------------------------------------------------------------------\r\n\r\n## Environment info\r\n\r\n- `datasets` version: latest version as of 07\/21\/2021\r\n- Platform: Google Colab\r\n- Python version: 3.7\r\n- PyArrow version:\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2699\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2699","id":950221226,"node_id":"MDU6SXNzdWU5NTAyMjEyMjY=","number":2699,"title":"cannot combine splits merging and streaming?","user":{"login":"eyaler","id":4436747,"node_id":"MDQ6VXNlcjQ0MzY3NDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4436747?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eyaler","html_url":"https:\/\/github.com\/eyaler","followers_url":"https:\/\/api.github.com\/users\/eyaler\/followers","following_url":"https:\/\/api.github.com\/users\/eyaler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eyaler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eyaler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eyaler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eyaler\/orgs","repos_url":"https:\/\/api.github.com\/users\/eyaler\/repos","events_url":"https:\/\/api.github.com\/users\/eyaler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eyaler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-22T01:13:25Z","updated_at":"2021-07-22T08:27:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"this does not work:\r\n`dataset = datasets.load_dataset('mc4','iw',split='train+validation',streaming=True)`\r\nwith error:\r\n`ValueError: Bad split: train+validation. Available splits: ['train', 'validation']`\r\n\r\nthese work:\r\n`dataset = datasets.load_dataset('mc4','iw',split='train+validation')`\r\n`dataset = datasets.load_dataset('mc4','iw',split='train',streaming=True)`\r\n`dataset = datasets.load_dataset('mc4','iw',split='validation',streaming=True)`\r\n\r\ni could not find a reference to this in the documentation and the error message is confusing. 
also would be nice to allow streaming for the merged splits","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2698\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698","id":950159867,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NzUxMzMw","number":2698,"title":"Ignore empty batch when writing","user":{"login":"pcuenca","id":1177582,"node_id":"MDQ6VXNlcjExNzc1ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1177582?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pcuenca","html_url":"https:\/\/github.com\/pcuenca","followers_url":"https:\/\/api.github.com\/users\/pcuenca\/followers","following_url":"https:\/\/api.github.com\/users\/pcuenca\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pcuenca\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pcuenca\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pcuenca\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pcuenca\/orgs","repos_url":"https:\/\/api.github.com\/users\/pcuenca\/repos","events_url":"https:\/\/api.github.com\/users\/pcuenca\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pcuenca\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T22:35:30Z","updated_at":"2021-07-26T14:56:03Z","closed_at":"2021-07-26T13:25:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2698","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2698.patch"},"body":"This prevents an schema update with unknown column types, as reported in #2644.\r\n\r\nThis is my first attempt at fixing the issue. I tested the following:\r\n- First batch returned by a batched map operation is empty.\r\n- An intermediate batch is empty.\r\n- `python -m unittest tests.test_arrow_writer` passes.\r\n\r\nHowever, `arrow_writer` looks like a pretty generic interface, I'm not sure if there are other uses I may have overlooked. 
Let me know if that's the case, or if a better approach would be preferable.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2697\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697","id":950021623,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NjMyODg0","number":2697,"title":"Fix import on Colab","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T19:03:38Z","updated_at":"2021-07-22T07:09:08Z","closed_at":"2021-07-22T07:09:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2697","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2697.patch"},"body":"Fix #2695, fix #2700. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2696\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696","id":949901726,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NTMwODg3","number":2696,"title":"Add support for disable_progress_bar on Windows","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T16:34:53Z","updated_at":"2021-07-26T13:31:14Z","closed_at":"2021-07-26T09:38:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2696","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2696.patch"},"body":"This PR is a continuation of #2667 and adds support for `utils.disable_progress_bar()` on Windows when using multiprocessing. 
This [answer](https:\/\/stackoverflow.com\/a\/6596695\/14095927) on SO explains it nicely why the current approach (with calling `utils.is_progress_bar_enabled()` inside `Dataset._map_single`) would not work on Windows.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2695\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2695","id":949864823,"node_id":"MDU6SXNzdWU5NDk4NjQ4MjM=","number":2695,"title":"Cannot import load_dataset on Colab","user":{"login":"bayartsogt-ya","id":43239645,"node_id":"MDQ6VXNlcjQzMjM5NjQ1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43239645?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bayartsogt-ya","html_url":"https:\/\/github.com\/bayartsogt-ya","followers_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/followers","following_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/orgs","repos_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/repos","events_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bayartsogt-ya\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-21T15:52:51Z","updated_at":"2021-07-22T07:26:25Z","closed_at":"2021-07-22T07:09:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nGot tqdm concurrent module not found error during importing load_dataset from datasets.\r\n\r\n## Steps to reproduce the bug\r\nHere [colab notebook](https:\/\/colab.research.google.com\/drive\/1pErWWnVP4P4mVHjSFUtkePd8Na_Qirg4?usp=sharing) to reproduce the error\r\n\r\nOn colab:\r\n```python\r\n!pip install datasets\r\nfrom datasets import load_dataset\r\n```\r\n\r\n## Expected results\r\nWorks without error\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n```\r\nModuleNotFoundError Traceback (most recent call last)\r\n in ()\r\n----> 1 from datasets import load_dataset, load_metric, Metric, MetricInfo, Features, Value\r\n 2 from sklearn.metrics import mean_squared_error\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/__init__.py in ()\r\n 31 )\r\n 32 \r\n---> 33 from .arrow_dataset import Dataset, concatenate_datasets\r\n 34 from .arrow_reader import ArrowReader, ReadInstruction\r\n 35 from .arrow_writer import ArrowWriter\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in ()\r\n 40 from tqdm.auto import tqdm\r\n 41 
\r\n---> 42 from datasets.tasks.text_classification import TextClassification\r\n 43 \r\n 44 from . import config, utils\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/tasks\/__init__.py in ()\r\n 1 from typing import Optional\r\n 2 \r\n----> 3 from ..utils.logging import get_logger\r\n 4 from .automatic_speech_recognition import AutomaticSpeechRecognition\r\n 5 from .base import TaskTemplate\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/__init__.py in ()\r\n 19 \r\n 20 from . import logging\r\n---> 21 from .download_manager import DownloadManager, GenerateMode\r\n 22 from .file_utils import DownloadConfig, cached_path, hf_bucket_url, is_remote_url, temp_seed\r\n 23 from .mock_download_manager import MockDownloadManager\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/download_manager.py in ()\r\n 24 \r\n 25 from .. import config\r\n---> 26 from .file_utils import (\r\n 27 DownloadConfig,\r\n 28 cached_path,\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in ()\r\n 25 import posixpath\r\n 26 import requests\r\n---> 27 from tqdm.contrib.concurrent import thread_map\r\n 28 \r\n 29 from .. import __version__, config, utils\r\n\r\nModuleNotFoundError: No module named 'tqdm.contrib.concurrent'\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.10.0\r\n- Platform: Colab\r\n- Python version: 3.7.11\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2694\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694","id":949844722,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDg0NTcy","number":2694,"title":"fix: \ud83d\udc1b change string format to allow copy\/paste to work in 
bash","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T15:30:40Z","updated_at":"2021-07-22T10:41:47Z","closed_at":"2021-07-22T10:41:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2694","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2694.patch"},"body":"Before: copy\/paste resulted in an error because the square bracket\r\ncharacters `[]` are special characters in bash","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2693\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693","id":949797014,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDQ1ODAz","number":2693,"title":"Fix OSCAR 
Esperanto","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T14:43:50Z","updated_at":"2021-07-21T14:53:52Z","closed_at":"2021-07-21T14:53:51Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2693","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2693.patch"},"body":"The Esperanto part (original) of OSCAR has the wrong number of examples:\r\n```python\r\nfrom datasets import load_dataset\r\nraw_datasets = load_dataset(\"oscar\", \"unshuffled_original_eo\")\r\n```\r\nraises\r\n```python\r\nNonMatchingSplitsSizesError:\r\n[{'expected': SplitInfo(name='train', num_bytes=314188336, num_examples=121171, dataset_name='oscar'),\r\n'recorded': SplitInfo(name='train', num_bytes=314064514, num_examples=121168, dataset_name='oscar')}]\r\n```\r\n\r\nI updated the number of expected examples in dataset_infos.json\r\n\r\ncc @sgugger ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2692\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692","id":949765484,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0NDE4MDg1","number":2692,"title":"Update BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-21T14:23:35Z","updated_at":"2021-07-21T15:31:41Z","closed_at":"2021-07-21T15:31:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2692","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2692.patch"},"body":"Update BibTeX entry","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2691\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2691","id":949758379,"node_id":"MDU6SXNzdWU5NDk3NTgzNzk=","number":2691,"title":"xtreme \/ pan-x cannot be downloaded","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-07-21T14:18:05Z","updated_at":"2021-07-26T09:34:22Z","closed_at":"2021-07-26T09:34:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nDataset xtreme \/ pan-x cannot be loaded\r\n\r\nSeems related to https:\/\/github.com\/huggingface\/datasets\/pull\/2326\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\ndataset = load_dataset(\"xtreme\", \"PAN-X.fr\")\r\n```\r\n\r\n## Expected results\r\n\r\nLoad the dataset\r\n\r\n## Actual results\r\n\r\n```\r\nFileNotFoundError: Couldn't find file at https:\/\/www.dropbox.com\/s\/12h3qqog6q4bjve\/panx_dataset.tar?dl=1\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: macOS-11.4-x86_64-i386-64bit\r\n- Python version: 3.8.11\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2690\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690","id":949574500,"node_id":"MDExOlB1bGxSZXF1ZXN0Njk0MjU5MDc1","number":2690,"title":"Docs details","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-21T10:43:14Z","updated_at":"2021-07-27T18:40:54Z","closed_at":"2021-07-27T18:40:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2690","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2690.patch"},"body":"Some comments here:\r\n\r\n- the code samples assume the expected libraries have already been installed. Maybe add a section at start, or add it to every code sample. 
Something like `pip install datasets transformers torch 'datasets[streaming]'` (maybe just link to https:\/\/huggingface.co\/docs\/datasets\/installation.html + a one-liner that installs all the requirements \/ alternatively a requirements.txt file)\r\n- \"If you\u2019d like to play with the examples, you must install it from source.\" in https:\/\/huggingface.co\/docs\/datasets\/installation.html: it's not clear to me what this means (what are these \"examples\"?)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html: \"or AWS bucket if it\u2019s not already stored in the library\". It's the only place in the doc (aside from the docstring https:\/\/huggingface.co\/docs\/datasets\/package_reference\/loading_methods.html?highlight=aws bucket#datasets.list_datasets) where the \"AWS bucket\" is mentioned. It's not easy to understand what this means. Maybe explain more, and link to https:\/\/s3.amazonaws.com\/datasets.huggingface.co and\/or https:\/\/huggingface.co\/docs\/datasets\/filesystems.html.\r\n- example in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#manually-downloading-files is obsoleted by https:\/\/github.com\/huggingface\/datasets\/pull\/2326. Also: see https:\/\/github.com\/huggingface\/datasets\/issues\/2691 for a bug on this specific dataset.\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#manually-downloading-files the doc says \"After you\u2019ve downloaded the files, you can point to the folder hosting them locally with the data_dir argument as follows:\", but the following example does not show how to use `data_dir`\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#csv-files, it would be nice to have an URL to the csv loader reference (but I'm not sure there is one in the API reference). This comment applies in many places in the doc: I would want the API reference to contain doc for all the code\/functions\/classes... and I would want a lot more links inside the doc pointing to the API entries.\r\n- in the API reference (docstrings) I would prefer \"SOURCE\" to link to github instead of a copy of the code inside the docs site (eg. https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/load.py#L711 instead of https:\/\/huggingface.co\/docs\/datasets\/_modules\/datasets\/load.html#load_dataset)\r\n- it seems like not all the API is exposed in the doc. For example, there is no doc for [`disable_progress_bar`](https:\/\/github.com\/huggingface\/datasets\/search?q=disable_progress_bar), see https:\/\/huggingface.co\/docs\/datasets\/search.html?q=disable_progress_bar, even if the code contains docstrings. Does it mean that the function is not officially supported? (otherwise, maybe it also deserves a mention in https:\/\/huggingface.co\/docs\/datasets\/package_reference\/logging_methods.html)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html?highlight=most%20efficient%20format%20have%20json%20files%20consisting%20multiple%20json%20objects#json-files, \"The most efficient format is to have JSON files consisting of multiple JSON objects, one per line, representing individual data rows:\", maybe link to https:\/\/en.wikipedia.org\/wiki\/JSON_streaming#Line-delimited_JSON and give it a name (\"line-delimited JSON\"? 
\"JSON Lines\" as in https:\/\/huggingface.co\/docs\/datasets\/processing.html#exporting-a-dataset-to-csv-json-parquet-or-to-python-objects ?)\r\n- in https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html, for the local files sections, it would be nice to provide sample csv \/ json \/ text files to download, so that it's easier for the reader to try to load them (instead: they won't try)\r\n- the doc explains how to shard a dataset, but does not explain why and when a dataset should be sharded (I have no idea... for [parallelizing](https:\/\/huggingface.co\/docs\/datasets\/processing.html#multiprocessing)?). It does neither give an idea of the number of shards a dataset typically should have and why.\r\n- the code example in https:\/\/huggingface.co\/docs\/datasets\/processing.html#mapping-in-a-distributed-setting does not work, because `training_args` has not been defined before in the doc.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2689\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2689","id":949447104,"node_id":"MDU6SXNzdWU5NDk0NDcxMDQ=","number":2689,"title":"cannot save the dataset to disk after rename_column","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-21T08:13:40Z","updated_at":"2021-07-21T13:11:04Z","closed_at":"2021-07-21T13:11:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nIf you use `rename_column` and do no other modification, you will be unable to save the dataset using `save_to_disk`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# Sample code to reproduce the bug\r\nIn [1]: from datasets import Dataset, load_from_disk\r\nIn [5]: dataset=Dataset.from_dict({'foo': [0]})\r\nIn [7]: dataset.save_to_disk('foo')\r\nIn [8]: dataset=load_from_disk('foo')\r\nIn [10]: 
dataset=dataset.rename_column('foo', 'bar')\r\nIn [11]: dataset.save_to_disk('foo')\r\n---------------------------------------------------------------------------\r\nPermissionError Traceback (most recent call last)\r\n in \r\n----> 1 dataset.save_to_disk('foo')\r\n\r\n\/mnt\/beegfs\/projects\/meerqat\/anaconda3\/envs\/meerqat\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in save_to_disk(self, dataset_path\r\n, fs)\r\n 597 if Path(dataset_path, config.DATASET_ARROW_FILENAME) in cache_files_paths:\r\n 598 raise PermissionError(\r\n--> 599 f\"Tried to overwrite {Path(dataset_path, config.DATASET_ARROW_FILENAME)} but a dataset can't overwrite itself.\"\r\n 600 )\r\n 601 if Path(dataset_path, config.DATASET_INDICES_FILENAME) in cache_files_paths:\r\n\r\nPermissionError: Tried to overwrite foo\/dataset.arrow but a dataset can't overwrite itself.\r\n```\r\n\r\nN. B. I created the dataset from dict to enable easy reproduction but the same happens if you load an existing dataset (e.g. starting from `In [8]`)\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-3.10.0-1160.11.1.el7.x86_64-x86_64-with-centos-7.9.2009-Core\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2688\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2688","id":949182074,"node_id":"MDU6SXNzdWU5NDkxODIwNzQ=","number":2688,"title":"hebrew language codes he and iw should be treated as aliases","user":{"login":"eyaler","id":4436747,"node_id":"MDQ6VXNlcjQ0MzY3NDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4436747?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eyaler","html_url":"https:\/\/github.com\/eyaler","followers_url":"https:\/\/api.github.com\/users\/eyaler\/followers","following_url":"https:\/\/api.github.com\/users\/eyaler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eyaler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eyaler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eyaler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eyaler\/orgs","repos_url":"https:\/\/api.github.com\/users\/eyaler\/repos","events_url":"https:\/\/api.github.com\/users\/eyaler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eyaler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-20T23:13:52Z","updated_at":"2021-07-21T16:34:53Z","closed_at":"2021-07-21T16:34:53Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"https:\/\/huggingface.co\/datasets\/mc4 not listed when searching for hebrew datasets (he) as it uses the older language code iw, preventing discoverability. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2687\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687","id":948890481,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNjY1NDI2","number":2687,"title":"Minor documentation fix","user":{"login":"slowwavesleep","id":44175589,"node_id":"MDQ6VXNlcjQ0MTc1NTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44175589?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slowwavesleep","html_url":"https:\/\/github.com\/slowwavesleep","followers_url":"https:\/\/api.github.com\/users\/slowwavesleep\/followers","following_url":"https:\/\/api.github.com\/users\/slowwavesleep\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slowwavesleep\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slowwavesleep\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slowwavesleep\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slowwavesleep\/orgs","repos_url":"https:\/\/api.github.com\/users\/slowwavesleep\/repos","events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T17:43:23Z","updated_at":"2021-07-21T13:04:55Z","closed_at":"2021-07-21T13:04:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2687","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2687.patch"},"body":"Currently, [Writing a dataset loading script](https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html) page has a small error. A link to `matinf` dataset in [_Dataset scripts of reference_](https:\/\/huggingface.co\/docs\/datasets\/add_dataset.html#dataset-scripts-of-reference) section actually leads to `xsquad`, instead. This PR fixes that. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2686\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686","id":948811669,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTk4OTE3","number":2686,"title":"Fix bad config ids that name cache directories","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T16:00:45Z","updated_at":"2021-07-20T16:27:15Z","closed_at":"2021-07-20T16:27:15Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2686","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2686.patch"},"body":"`data_dir=None` was considered a dataset config parameter, hence creating a special config_id for all dataset being loaded.\r\nSince the config_id is used to name the cache directories, this leaded to datasets being regenerated for users.\r\n\r\nI fixed this by ignoring the value of `data_dir` when it's `None` when computing the config_id.\r\nI also added a test to make sure the cache directories are not unexpectedly renamed in the future.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2683","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2685\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685","id":948791572,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTgxNTk2","number":2685,"title":"Fix Blog Authorship Corpus 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-20T15:44:50Z","updated_at":"2021-07-21T13:11:58Z","closed_at":"2021-07-21T13:11:58Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2685","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2685.patch"},"body":"This PR:\r\n- Update the JSON metadata file, which previously was raising a `NonMatchingSplitsSizesError`\r\n- Fix the codec of the data files (`latin_1` instead of `utf-8`), which previously was raising ` UnicodeDecodeError` for some files\r\n\r\nClose #2679.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2684\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684","id":948771753,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTY0MDY4","number":2684,"title":"Print absolute local paths in load_dataset error 
messages","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T15:28:28Z","updated_at":"2021-07-22T20:48:19Z","closed_at":"2021-07-22T14:01:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2684","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2684.patch"},"body":"Use absolute local paths in the error messages of `load_dataset` as per @stas00's suggestion in https:\/\/github.com\/huggingface\/datasets\/pull\/2500#issuecomment-874891223 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2683\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2683","id":948721379,"node_id":"MDU6SXNzdWU5NDg3MjEzNzk=","number":2683,"title":"Cache directories changed due to recent changes in how config kwargs are 
handled","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-20T14:37:57Z","updated_at":"2021-07-20T16:27:15Z","closed_at":"2021-07-20T16:27:15Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Since #2659 I can see weird cache directory names with hashes in the config id, even though no additional config kwargs are passed. 
For example:\r\n\r\n```python\r\nfrom datasets import load_dataset_builder\r\n\r\nc4_builder = load_dataset_builder(\"c4\", \"en\")\r\nprint(c4_builder.cache_dir)\r\n# \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/c4\/en-174d3b7155eb68db\/0.0.0\/...\r\n\r\n# instead of \r\n# \/Users\/quentinlhoest\/.cache\/huggingface\/datasets\/c4\/en\/0.0.0\/...\r\n```\r\nThis issue could be annoying since it would simply ignore old cache directories for users, and regenerate datasets\r\n\r\ncc @stas00 this is what you experienced a few days ago\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2682\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682","id":948713137,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNTE2NjU2","number":2682,"title":"Fix c4 expected files","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T14:29:31Z","updated_at":"2021-07-20T14:38:11Z","closed_at":"2021-07-20T14:38:10Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2682","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2682.patch"},"body":"Some files were not registered in the list of expected files to download\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2677","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2681\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2681","id":948708645,"node_id":"MDU6SXNzdWU5NDg3MDg2NDU=","number":2681,"title":"5 duplicate 
datasets","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-20T14:25:00Z","updated_at":"2021-07-20T15:44:17Z","closed_at":"2021-07-20T15:44:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nIn 5 cases, I could find a dataset on Paperswithcode which references two Hugging Face datasets as dataset loaders. They are:\r\n\r\n- https:\/\/paperswithcode.com\/dataset\/multinli -> https:\/\/huggingface.co\/datasets\/multi_nli and https:\/\/huggingface.co\/datasets\/multi_nli_mismatch\r\n \r\n \"Capture\r\n\r\n- https:\/\/paperswithcode.com\/dataset\/squad -> https:\/\/huggingface.co\/datasets\/squad and https:\/\/huggingface.co\/datasets\/squad_v2\r\n- https:\/\/paperswithcode.com\/dataset\/narrativeqa -> https:\/\/huggingface.co\/datasets\/narrativeqa and https:\/\/huggingface.co\/datasets\/narrativeqa_manual\r\n- https:\/\/paperswithcode.com\/dataset\/hate-speech-and-offensive-language -> https:\/\/huggingface.co\/datasets\/hate_offensive and https:\/\/huggingface.co\/datasets\/hate_speech_offensive\r\n- https:\/\/paperswithcode.com\/dataset\/newsph-nli -> https:\/\/huggingface.co\/datasets\/newsph and https:\/\/huggingface.co\/datasets\/newsph_nli\r\n\r\nPossible solutions:\r\n- don't fix (it works)\r\n- for each pair of duplicate datasets, remove one, and create an alias to the other.\r\n\r\n## Steps to reproduce the bug\r\n\r\nVisit the Paperswithcode links, and look at the \"Dataset Loaders\" section\r\n\r\n## Expected results\r\n\r\nThere should only be one reference to a Hugging Face dataset loader\r\n\r\n## Actual results\r\n\r\nTwo Hugging Face dataset loaders\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2680\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680","id":948649716,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkzNDYyNzY3","number":2680,"title":"feat: \ud83c\udfb8 
add paperswithcode id for qasper dataset","user":{"login":"severo","id":1676121,"node_id":"MDQ6VXNlcjE2NzYxMjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1676121?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/severo","html_url":"https:\/\/github.com\/severo","followers_url":"https:\/\/api.github.com\/users\/severo\/followers","following_url":"https:\/\/api.github.com\/users\/severo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/severo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/severo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/severo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/severo\/orgs","repos_url":"https:\/\/api.github.com\/users\/severo\/repos","events_url":"https:\/\/api.github.com\/users\/severo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/severo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-20T13:22:29Z","updated_at":"2021-07-20T14:04:10Z","closed_at":"2021-07-20T14:04:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2680","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2680.patch"},"body":"The reverse reference exists on paperswithcode:\r\nhttps:\/\/paperswithcode.com\/dataset\/qasper","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2679\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2679","id":948506638,"node_id":"MDU6SXNzdWU5NDg1MDY2Mzg=","number":2679,"title":"Cannot load the blog_authorship_corpus due to codec errors","user":{"login":"izaskr","id":38069449,"node_id":"MDQ6VXNlcjM4MDY5NDQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38069449?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/izaskr","html_url":"https:\/\/github.com\/izaskr","followers_url":"https:\/\/api.github.com\/users\/izaskr\/followers","following_url":"https:\/\/api.github.com\/users\/izaskr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/izaskr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/izaskr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/izaskr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/izaskr\/orgs","repos_url":"https:\/\/api.github.com\/users\/izaskr\/repos","events_url":"https:\/\/api.github.com\/users\/izaskr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/izaskr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-07-20T10:13:20Z","updated_at":"2021-07-21T17:02:21Z","closed_at":"2021-07-21T13:11:58Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nA codec error is raised while loading the blog_authorship_corpus. \r\n\r\n## Steps to reproduce the bug\r\n```\r\nfrom datasets import load_dataset\r\nraw_datasets = load_dataset(\"blog_authorship_corpus\")\r\n```\r\n\r\n\r\n## Expected results\r\nLoading the dataset without errors.\r\n\r\n## Actual results\r\nAn error similar to the one below was raised for (what seems like) every XML file.\r\n\/home\/izaskr\/.cache\/huggingface\/datasets\/downloads\/extracted\/7cf52524f6517e168604b41c6719292e8f97abbe8f731e638b13423f4212359a\/blogs\/788358.male.24.Arts.Libra.xml cannot be loaded. 
Error message: 'utf-8' codec can't decode byte 0xe7 in position 7551: invalid continuation byte\r\n\r\nTraceback (most recent call last): \r\n File \"\", line 1, in \r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 856, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 583, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 671, in _download_and_prepare\r\n verify_splits(self.info.splits, split_dict)\r\n File \"\/home\/izaskr\/anaconda3\/envs\/local_vae_older\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 74, in verify_splits\r\n raise NonMatchingSplitsSizesError(str(bad_splits))\r\ndatasets.utils.info_utils.NonMatchingSplitsSizesError: [{'expected': SplitInfo(name='train', num_bytes=610252351, num_examples=532812, dataset_name='blog_authorship_corpus'), 'recorded': SplitInfo(name='train', num_bytes=614706451, num_examples=535568, dataset_name='blog_authorship_corpus')}, {'expected': SplitInfo(name='validation', num_bytes=37500394, num_examples=31277, dataset_name='blog_authorship_corpus'), 'recorded': SplitInfo(name='validation', num_bytes=32553710, num_examples=28521, dataset_name='blog_authorship_corpus')}]\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-4.15.0-132-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyArrow version: 4.0.1\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2678\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2678","id":948471222,"node_id":"MDU6SXNzdWU5NDg0NzEyMjI=","number":2678,"title":"Import Error in Kaggle notebook","user":{"login":"prikmm","id":47216475,"node_id":"MDQ6VXNlcjQ3MjE2NDc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47216475?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/prikmm","html_url":"https:\/\/github.com\/prikmm","followers_url":"https:\/\/api.github.com\/users\/prikmm\/followers","following_url":"https:\/\/api.github.com\/users\/prikmm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/prikmm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/prikmm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/prikmm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/prikmm\/orgs","repos_url":"https:\/\/api.github.com\/users\/prikmm\/repos","events_url":"https:\/\/api.github.com\/users\/prikmm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/prikmm\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-20T09:28:38Z","updated_at":"2021-07-21T13:59:26Z","closed_at":"2021-07-21T13:03:02Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nNot able to import datasets library in kaggle notebooks\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n!pip install datasets\r\nimport datasets\r\n```\r\n\r\n## Expected results\r\nNo such error\r\n\r\n## Actual results\r\n```\r\nImportError Traceback (most recent call last)\r\n in \r\n----> 1 import datasets\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/__init__.py in \r\n 31 )\r\n 32 \r\n---> 33 from .arrow_dataset import Dataset, concatenate_datasets\r\n 34 from .arrow_reader import ArrowReader, ReadInstruction\r\n 35 from .arrow_writer import ArrowWriter\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in \r\n 36 import pandas as pd\r\n 37 import pyarrow as pa\r\n---> 38 import pyarrow.compute as pc\r\n 39 from multiprocess import Pool, RLock\r\n 40 from tqdm.auto import tqdm\r\n\r\n\/opt\/conda\/lib\/python3.7\/site-packages\/pyarrow\/compute.py in \r\n 16 # under the License.\r\n 17 \r\n---> 18 from pyarrow._compute import ( # noqa\r\n 19 Function,\r\n 20 FunctionOptions,\r\n\r\nImportError: \/opt\/conda\/lib\/python3.7\/site-packages\/pyarrow\/_compute.cpython-37m-x86_64-linux-gnu.so: undefined symbol: _ZNK5arrow7compute15KernelSignature8ToStringEv\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Kaggle\r\n- Python version: 3.7.10\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2677\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2677","id":948429788,"node_id":"MDU6SXNzdWU5NDg0Mjk3ODg=","number":2677,"title":"Error when downloading C4","user":{"login":"Aktsvigun","id":36672861,"node_id":"MDQ6VXNlcjM2NjcyODYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36672861?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Aktsvigun","html_url":"https:\/\/github.com\/Aktsvigun","followers_url":"https:\/\/api.github.com\/users\/Aktsvigun\/followers","following_url":"https:\/\/api.github.com\/users\/Aktsvigun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Aktsvigun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Aktsvigun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Aktsvigun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Aktsvigun\/orgs","repos_url":"https:\/\/api.github.com\/users\/Aktsvigun\/repos","events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Aktsvigun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-07-20T08:37:30Z","updated_at":"2021-07-20T14:41:31Z","closed_at":"2021-07-20T14:38:10Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\nI am trying to download `en` corpus from C4 dataset. However, I get an error caused by validation files download (see image). My code is very primitive:\r\n`datasets.load_dataset('c4', 'en')`\r\n\r\nIs this a bug or do I have some configurations missing on my server? 
\r\nThanks!\r\n\r\n\r\n\"\u0421\u043d\u0438\u043c\u043e\u043a","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2676\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676","id":947734909,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyNjc2NTg5","number":2676,"title":"Increase json reader block_size automatically","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T14:51:14Z","updated_at":"2021-07-19T17:51:39Z","closed_at":"2021-07-19T17:51:38Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2676","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2676.patch"},"body":"Currently some files can't be read with the default parameters of the JSON lines reader.\r\nFor example this one:\r\nhttps:\/\/huggingface.co\/datasets\/thomwolf\/codeparrot\/resolve\/main\/file-000000000006.json.gz\r\n\r\nraises a pyarrow error:\r\n```python\r\nArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)\r\n```\r\n\r\nThe block size that is used is the default one by pyarrow (related to this [jira issue](https:\/\/issues.apache.org\/jira\/browse\/ARROW-9612)).\r\n\r\nTo fix this issue I changed the block_size to increase automatically if there is a straddling issue when parsing a batch of json lines.\r\n\r\nBy default the value is `chunksize \/\/ 32` in order to leverage multithreading, and it doubles every time a straddling issue occurs. 
The block_size is then reset for each file.\r\n\r\ncc @thomwolf @albertvillanova ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2675\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675","id":947657732,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyNjEwNTA1","number":2675,"title":"Parallelize ETag requests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T13:30:42Z","updated_at":"2021-07-19T19:33:25Z","closed_at":"2021-07-19T19:33:25Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2675","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2675.patch"},"body":"Since https:\/\/github.com\/huggingface\/datasets\/pull\/2628 we use the ETag or the remote data files to compute the directory in the cache where a dataset is saved. This is useful in order to reload the dataset from the cache only if the remote files haven't changed.\r\n\r\nIn this I made the ETag requests parallel using multithreading. 
There is also a tqdm progress bar that shows up if there are more than 16 data files.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2674\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674","id":947338202,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMzMzODU3","number":2674,"title":"Fix sacrebleu parameter name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T07:07:26Z","updated_at":"2021-07-19T08:07:03Z","closed_at":"2021-07-19T08:07:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2674","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2674.patch"},"body":"DONE:\r\n- Fix parameter name: `smooth` to `smooth_method`.\r\n- Improve kwargs description.\r\n- Align docs on using a metric.\r\n- Add example of passing additional arguments in using metrics.\r\n\r\nRelated to #2669.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2673\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673","id":947300008,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMzAxMTgw","number":2673,"title":"Fix potential DuplicatedKeysError in 
SQuAD","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T06:08:00Z","updated_at":"2021-07-19T07:08:03Z","closed_at":"2021-07-19T07:08:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2673","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2673.patch"},"body":"DONE:\r\n- Fix potential DuplicatedKeysError by ensuring keys are unique.\r\n- Align examples in the docs with SQuAD code.\r\n\r\nWe should promote as a good practice that keys should be programmatically generated as unique, instead of being read from the data (which might not be unique).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2672\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672","id":947294605,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMjk2NDQ4","number":2672,"title":"Fix potential DuplicatedKeysError in 
LibriSpeech","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-19T06:00:49Z","updated_at":"2021-07-19T06:28:57Z","closed_at":"2021-07-19T06:28:56Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2672","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2672.patch"},"body":"DONE:\r\n- Fix unnecessary path join.\r\n- Fix potential DuplicatedKeysError by ensuring keys are unique.\r\n\r\nWe should promote as a good practice that keys should be programmatically generated as unique, instead of being read from the data (which might not be unique).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2671\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671","id":947273875,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkyMjc5MTM0","number":2671,"title":"Mesinesp development and training data sets have been 
added.","user":{"login":"aslihanuysall","id":32900185,"node_id":"MDQ6VXNlcjMyOTAwMTg1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32900185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aslihanuysall","html_url":"https:\/\/github.com\/aslihanuysall","followers_url":"https:\/\/api.github.com\/users\/aslihanuysall\/followers","following_url":"https:\/\/api.github.com\/users\/aslihanuysall\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aslihanuysall\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aslihanuysall\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aslihanuysall\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aslihanuysall\/orgs","repos_url":"https:\/\/api.github.com\/users\/aslihanuysall\/repos","events_url":"https:\/\/api.github.com\/users\/aslihanuysall\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aslihanuysall\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-19T05:14:38Z","updated_at":"2021-07-19T07:32:28Z","closed_at":"2021-07-19T06:45:50Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2671","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2671.patch"},"body":"https:\/\/zenodo.org\/search?page=1&size=20&q=mesinesp, Mesinesp has Medical Semantic Indexed records in Spanish. Indexing is done using DeCS codes, a sort of Spanish equivalent to MeSH terms.\r\nThe Mesinesp (Spanish BioASQ track, see https:\/\/temu.bsc.es\/mesinesp) development set has a total of 750 records.\r\nThe Mesinesp (Spanish BioASQ track, see https:\/\/temu.bsc.es\/mesinesp) training set has a total of 369,368 records. 
\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2670\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2670","id":947120709,"node_id":"MDU6SXNzdWU5NDcxMjA3MDk=","number":2670,"title":"Using sharding to parallelize indexing","user":{"login":"ggdupont","id":5583410,"node_id":"MDQ6VXNlcjU1ODM0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5583410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ggdupont","html_url":"https:\/\/github.com\/ggdupont","followers_url":"https:\/\/api.github.com\/users\/ggdupont\/followers","following_url":"https:\/\/api.github.com\/users\/ggdupont\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ggdupont\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ggdupont\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ggdupont\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ggdupont\/orgs","repos_url":"https:\/\/api.github.com\/users\/ggdupont\/repos","events_url":"https:\/\/api.github.com\/users\/ggdupont\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ggdupont\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-18T21:26:26Z","updated_at":"2021-07-18T21:26:26Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nCreating an elasticsearch index on a large dataset can be quite slow and cannot be parallelized over shards (the index creations collide).\r\n\r\n**Describe the solution you'd like**\r\nWhen working on dataset shards, if an index already exists, its mapping should be checked and, if compatible, the indexing process should continue with the shard data. \r\n\r\nAdditionally, at the end of the process, the `_indexes` dict should be sent back to the original dataset object (from which the shards have been created) to allow using the index for later filtering on the whole dataset.\r\n\r\n**Describe alternatives you've considered**\r\nEach dataset shard could create independent partial indices. Then, at the whole-dataset level, all indices should be referenced in the `_indexes` dict and be used when querying through `get_nearest_examples()`. 
The drawback is that the scores will be computed independently on the partial indices, leading to inconsistent values for most scoring based on corpus-level statistics (tf\/idf, BM25).\r\n\r\n**Additional context**\r\nThe objective is to parallelize the index creation to speed up the process (i.e. putting more load on the ES server, which is fine since it can handle a large load) while later enabling search on the whole dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2669\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2669","id":946982998,"node_id":"MDU6SXNzdWU5NDY5ODI5OTg=","number":2669,"title":"Metric kwargs are not passed to underlying external metric f1_score","user":{"login":"BramVanroy","id":2779410,"node_id":"MDQ6VXNlcjI3Nzk0MTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2779410?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BramVanroy","html_url":"https:\/\/github.com\/BramVanroy","followers_url":"https:\/\/api.github.com\/users\/BramVanroy\/followers","following_url":"https:\/\/api.github.com\/users\/BramVanroy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BramVanroy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BramVanroy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BramVanroy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BramVanroy\/orgs","repos_url":"https:\/\/api.github.com\/users\/BramVanroy\/repos","events_url":"https:\/\/api.github.com\/users\/BramVanroy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BramVanroy\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-18T08:32:31Z","updated_at":"2021-07-18T18:36:05Z","closed_at":"2021-07-18T11:19:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen I want to use F1 score with average=\"min\", this keyword argument does not seem to be passed through to the underlying sklearn metric. 
This is evident because [sklearn](https:\/\/scikit-learn.org\/stable\/modules\/generated\/sklearn.metrics.f1_score.html) throws an error telling me so.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nf1 = datasets.load_metric(\"f1\", keep_in_memory=True, average=\"min\")\r\nf1.add_batch(predictions=[0,2,3], references=[1, 2, 3])\r\nf1.compute()\r\n```\r\n\r\n## Expected results\r\nNo error, because `average=\"min\"` should be passed correctly to f1_score in sklearn.\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\datasets\\metric.py\", line 402, in compute\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\n File \"C:\\Users\\bramv\\.cache\\huggingface\\modules\\datasets_modules\\metrics\\f1\\82177930a325d4c28342bba0f116d73f6d92fb0c44cd67be32a07c1262b61cfe\\f1.py\", line 97, in _compute\r\n \"f1\": f1_score(\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\utils\\validation.py\", line 63, in inner_f\r\n return f(*args, **kwargs)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1071, in f1_score\r\n return fbeta_score(y_true, y_pred, beta=1, labels=labels,\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\utils\\validation.py\", line 63, in inner_f\r\n return f(*args, **kwargs)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1195, in fbeta_score\r\n _, _, f, _ = precision_recall_fscore_support(y_true, y_pred,\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\utils\\validation.py\", line 63, in inner_f\r\n return f(*args, **kwargs)\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1464, in precision_recall_fscore_support\r\n labels = _check_set_wise_labels(y_true, y_pred, average, labels,\r\n File \"C:\\Users\\bramv\\.virtualenvs\\pipeline-TpEsXVex\\lib\\site-packages\\sklearn\\metrics\\_classification.py\", line 1294, in _check_set_wise_labels\r\n raise ValueError(\"Target is %s but average='binary'. Please \"\r\nValueError: Target is multiclass but average='binary'. 
Please choose another average setting, one of [None, 'micro', 'macro', 'weighted'].\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.9.2\r\n- PyArrow version: 4.0.1","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2668\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668","id":946867622,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTY1MTY1","number":2668,"title":"Add Russian SuperGLUE","user":{"login":"slowwavesleep","id":44175589,"node_id":"MDQ6VXNlcjQ0MTc1NTg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44175589?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slowwavesleep","html_url":"https:\/\/github.com\/slowwavesleep","followers_url":"https:\/\/api.github.com\/users\/slowwavesleep\/followers","following_url":"https:\/\/api.github.com\/users\/slowwavesleep\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slowwavesleep\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slowwavesleep\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slowwavesleep\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slowwavesleep\/orgs","repos_url":"https:\/\/api.github.com\/users\/slowwavesleep\/repos","events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slowwavesleep\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-17T17:41:28Z","updated_at":"2021-07-29T11:50:31Z","closed_at":"2021-07-29T11:50:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2668","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2668.patch"},"body":"Hi,\r\n\r\nThis adds the [Russian SuperGLUE](https:\/\/russiansuperglue.com\/) dataset. 
For the most part I reused the code for the original SuperGLUE, although there are some relatively minor differences in the structure that I accounted for.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2667\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667","id":946861908,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTYwNzc3","number":2667,"title":"Use tqdm from tqdm_utils","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-17T17:06:35Z","updated_at":"2021-07-19T17:39:10Z","closed_at":"2021-07-19T17:32:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2667","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2667.patch"},"body":"This PR replaces `tqdm` from the `tqdm` lib with `tqdm` from `datasets.utils.tqdm_utils`. With this change, it's possible to disable progress bars just by calling `disable_progress_bar`. Note this doesn't work on Windows when using multiprocessing due to how global variables are shared between processes. Currently, there is no easy way to disable progress bars in a multiprocess setting on Windows (patching logging with `datasets.utils.logging.get_verbosity = lambda: datasets.utils.logging.NOTSET` doesn't seem to work as well), so adding support for this is a future goal. 
Additionally, this PR adds a unit (\"ba\" for batches) to the bar printed by `Dataset.to_json` (this change is motivated by https:\/\/github.com\/huggingface\/datasets\/issues\/2657).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2666\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666","id":946825140,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTMzMDM1","number":2666,"title":"Adds CodeClippy dataset [WIP]","user":{"login":"arampacha","id":69807323,"node_id":"MDQ6VXNlcjY5ODA3MzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69807323?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arampacha","html_url":"https:\/\/github.com\/arampacha","followers_url":"https:\/\/api.github.com\/users\/arampacha\/followers","following_url":"https:\/\/api.github.com\/users\/arampacha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arampacha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arampacha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arampacha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arampacha\/orgs","repos_url":"https:\/\/api.github.com\/users\/arampacha\/repos","events_url":"https:\/\/api.github.com\/users\/arampacha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arampacha\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-17T13:32:04Z","updated_at":"2021-07-19T09:09:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2666","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2666.patch"},"body":"CodeClippy is an open-source code dataset scraped from GitHub during the flax-jax-community-week\r\nhttps:\/\/the-eye.eu\/public\/AI\/training_data\/code_clippy_data\/","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2665\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665","id":946822036,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxOTMwNjky","number":2665,"title":"Adds APPS dataset to the hub 
[WIP]","user":{"login":"arampacha","id":69807323,"node_id":"MDQ6VXNlcjY5ODA3MzIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/69807323?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arampacha","html_url":"https:\/\/github.com\/arampacha","followers_url":"https:\/\/api.github.com\/users\/arampacha\/followers","following_url":"https:\/\/api.github.com\/users\/arampacha\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arampacha\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arampacha\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arampacha\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arampacha\/orgs","repos_url":"https:\/\/api.github.com\/users\/arampacha\/repos","events_url":"https:\/\/api.github.com\/users\/arampacha\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arampacha\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-17T13:13:17Z","updated_at":"2021-07-17T17:56:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2665","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2665.patch"},"body":"A loading script for [APPS dataset](https:\/\/github.com\/hendrycks\/apps) ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2663\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2663","id":946552273,"node_id":"MDU6SXNzdWU5NDY1NTIyNzM=","number":2663,"title":"[`to_json`] add multi-proc sharding support","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-16T19:41:50Z","updated_at":"2021-08-03T21:29:21Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"As discussed on slack it appears that `to_json` is quite slow on huge datasets like OSCAR.\r\n\r\nI implemented sharded saving, which is much much faster - but the tqdm bars all overwrite each other, so it's hard to make sense of the progress, so if possible ideally this multi-proc support could be implemented internally in `to_json` via `num_proc` argument. I guess `num_proc` will be the number of shards?\r\n\r\nI think the user will need to use this feature wisely, since too many processes writing to say normal style HD is likely to be slower than one process.\r\n\r\nI'm not sure whether the user should be responsible to concatenate the shards at the end or `datasets`, either way works for my needs.\r\n\r\nThe code I was using:\r\n\r\n```\r\nfrom multiprocessing import cpu_count, Process, Queue\r\n\r\n[...]\r\n\r\nfiltered_dataset = concat_dataset.map(filter_short_documents, batched=True, batch_size=256, num_proc=cpu_count())\r\n\r\nDATASET_NAME = \"oscar\"\r\nSHARDS = 10\r\ndef process_shard(idx):\r\n print(f\"Sharding {idx}\")\r\n ds_shard = filtered_dataset.shard(SHARDS, idx, contiguous=True)\r\n # ds_shard = ds_shard.shuffle() # remove contiguous=True above if shuffling\r\n print(f\"Saving {DATASET_NAME}-{idx}.jsonl\")\r\n ds_shard.to_json(f\"{DATASET_NAME}-{idx}.jsonl\", orient=\"records\", lines=True, force_ascii=False)\r\n\r\nqueue = Queue()\r\nprocesses = [Process(target=process_shard, args=(idx,)) for idx in range(SHARDS)]\r\nfor p in processes:\r\n p.start()\r\n\r\nfor p in processes:\r\n p.join()\r\n```\r\n\r\nThank you!\r\n\r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2662\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662","id":946470815,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxNjM5MjU5","number":2662,"title":"Load Dataset from the Hub (NO DATASET 
SCRIPT)","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-16T17:21:58Z","updated_at":"2021-07-29T09:27:44Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2662","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2662.patch"},"body":"## Load the data from any Dataset repository on the Hub\r\n\r\nThis PR adds support for loading datasets from any dataset repository on the hub, without requiring any dataset script.\r\n\r\nAs a user it's now possible to create a repo and upload some csv\/json\/text\/parquet files, and then be able to load the data in one line. Here is an example with the `allenai\/c4` repository that contains a lot of compressed json lines files:\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndata_files = {\"train\": \"en\/c4-train.*.json.gz\"}\r\nc4 = load_dataset(\"allenai\/c4\", data_files=data_files, split=\"train\", streaming=True)\r\n\r\nprint(c4.n_shards)\r\n# 1024\r\nprint(next(iter(c4)))\r\n# {'text': 'Beginners BBQ Class Takin...'}\r\n```\r\n\r\nBy default it loads all the files, but as shown in the example you can choose the ones you want with unix style patterns.\r\n\r\nOf course it's still possible to use dataset scripts since they offer the most flexibility.\r\n\r\n## Implementation details\r\n\r\nIt uses `huggingface_hub` to list the files in a dataset repository.\r\n\r\nIf you provide a path to a local directory instead of a repository name, it works the same way but it uses `glob`.\r\n\r\nDepending on the data files available, or passed in the `data_files` parameter, one of the available builders will be used among the csv, json, text and parquet builders.\r\n\r\nBecause of this, it's not possible to load both csv and json files at once. 
In this case you have to load them separately and then concatenate the two datasets for example.\r\n\r\n## TODO\r\n\r\n- [x] tests\r\n- [ ] docs\r\n- [x] when huggingface_hub gets a new release, update the CI and the setup.py\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/2629","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2661\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661","id":946446967,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxNjE5MzAz","number":2661,"title":"Add SD task for SUPERB","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-07-16T16:43:21Z","updated_at":"2021-08-04T17:03:53Z","closed_at":"2021-08-04T17:03:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2661","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2661.patch"},"body":"Include the SD (Speaker Diarization) task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051) and `s3prl` [instructions](https:\/\/github.com\/s3prl\/s3prl\/tree\/master\/s3prl\/downstream#sd-speaker-diarization).\r\n\r\nTODO:\r\n- [x] Generate the LibriMix corpus\r\n- [x] Prepare the corpus for diarization\r\n- [x] Upload these files to the superb-data repo\r\n- [x] Transcribe the corresponding s3prl processing of these files into our superb loading script\r\n- [x] README: tags + description sections\r\n- ~~Add DER metric~~ (we leave the DER metric for a follow-up PR)\r\n\r\nRelated to #2619.\r\n\r\nClose #2653.\r\n\r\ncc: @lewtun ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2660\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660","id":946316180,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxNTA4NzE0","number":2660,"title":"Move checks from _map_single to map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-16T13:53:33Z","updated_at":"2021-07-16T13:54:20Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2660","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2660.patch"},"body":"Moves the param checks from `_map_single` to `map`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2659\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659","id":946155407,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkxMzcwNzU3","number":2659,"title":"Allow dataset config kwargs to be 
None","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-16T10:25:38Z","updated_at":"2021-07-16T12:46:07Z","closed_at":"2021-07-16T12:46:07Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2659","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2659.patch"},"body":"Close https:\/\/github.com\/huggingface\/datasets\/issues\/2658\r\n\r\nThe dataset config kwargs that were set to None were simply ignored.\r\nThis was an issue because None has some meaning for certain parameters of certain builders, like the `sep` parameter of the \"csv\" builder that allows inferring the separator.\r\n\r\ncc @SBrandeis ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2658\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2658","id":946139532,"node_id":"MDU6SXNzdWU5NDYxMzk1MzI=","number":2658,"title":"Can't pass `sep=None` to load_dataset(\"csv\", ...) 
to infer the separator via pandas.read_csv","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-16T10:05:44Z","updated_at":"2021-07-16T12:46:06Z","closed_at":"2021-07-16T12:46:06Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"When doing `load_dataset(\"csv\", sep=None)`, the `sep` passed to `pd.read_csv` is still the default `sep=\",\"` instead, which makes it impossible to make the csv loader infer the separator.\r\n\r\nRelated to https:\/\/github.com\/huggingface\/datasets\/pull\/2656\r\n\r\ncc @SBrandeis ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2657\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2657","id":945822829,"node_id":"MDU6SXNzdWU5NDU4MjI4Mjk=","number":2657,"title":"`to_json` reporting enhancements","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-15T23:32:18Z","updated_at":"2021-07-15T23:33:53Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"While using `to_json` 2 things came to mind that would have made the experience easier on the user:\r\n\r\n1. Could we have a `desc` arg for the tqdm use and a fallback to just `to_json` so that it'd be clear to the user what's happening? Surely, one can just print the description before calling json, but I thought perhaps it'd help to have it self-identify like you did for other progress bars recently.\r\n\r\n2. It took me a while to make sense of the reported numbers:\r\n```\r\n 22%|\u2588\u2588\u258f | 1536\/7076 [12:30:57<44:09:42, 28.70s\/it]\r\n```\r\nSo iteration here happens to be 10K samples, and the total is 70M records. But the user does't know that, so the progress bar is perfect, but the numbers it reports are meaningless until one discovers that 1it=10K samples. And one still has to convert these in the head - so it's not quick. Not exactly sure what's the best way to approach this, perhaps it can be part of `desc`? 
or report M or K, so it'd be built-in if it were to print, e.g.:\r\n```\r\n 22%|\u2588\u2588\u258f | 15360K\/70760K [12:30:57<44:09:42, 28.70s\/it]\r\n```\r\nor \r\n```\r\n 22%|\u2588\u2588\u258f | 15.36M\/70.76M [12:30:57<44:09:42, 28.70s\/it]\r\n```\r\n(while of course remaining friendly to small datasets)\r\n\r\nI forget if tqdm lets you add a magnitude identifier to the running count.\r\n\r\nThank you!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2656\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656","id":945421790,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkwNzUzNjA3","number":2656,"title":"Change `from_csv` default arguments","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-15T14:09:06Z","updated_at":"2021-07-16T10:23:26Z","closed_at":"2021-07-16T10:23:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2656","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2656.patch"},"body":"Passing `sep=None` to pandas's `read_csv` lets pandas guess the CSV file's separator\r\n\r\nThis PR allows users to use this pandas's feature by passing `sep=None` to `Dataset.from_csv`:\r\n\r\n```python\r\nDataset.from_csv(\r\n ...,\r\n sep=None\r\n)\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2655\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2655","id":945382723,"node_id":"MDU6SXNzdWU5NDUzODI3MjM=","number":2655,"title":"Allow the selection 
of multiple columns at once","user":{"login":"Dref360","id":8976546,"node_id":"MDQ6VXNlcjg5NzY1NDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8976546?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Dref360","html_url":"https:\/\/github.com\/Dref360","followers_url":"https:\/\/api.github.com\/users\/Dref360\/followers","following_url":"https:\/\/api.github.com\/users\/Dref360\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Dref360\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Dref360\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Dref360\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Dref360\/orgs","repos_url":"https:\/\/api.github.com\/users\/Dref360\/repos","events_url":"https:\/\/api.github.com\/users\/Dref360\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Dref360\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-15T13:30:45Z","updated_at":"2021-07-23T15:40:57Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nSimilar to pandas, it would be great if we could select multiple columns at once.\r\n\r\n\r\n**Describe the solution you'd like**\r\n```python\r\nmy_dataset = ... # Has columns ['idx', 'sentence', 'label']\r\nidx, label = my_dataset[['idx', 'label']]\r\n```\r\n\r\n**Describe alternatives you've considered**\r\nwe can do `[dataset[col] for col in ('idx', 'label')]`\r\n\r\n**Additional context**\r\nThis is of course very minor.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2654\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2654","id":945167231,"node_id":"MDU6SXNzdWU5NDUxNjcyMzE=","number":2654,"title":"Give a user feedback if the dataset he loads is streamable or 
not","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-15T09:07:27Z","updated_at":"2021-08-02T11:03:21Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nI would love to know if a `dataset` is with the current implementation streamable or not. \r\n\r\n**Describe the solution you'd like**\r\nWe could show a warning when a dataset is loaded with `load_dataset('...',streaming=True)` when its lot streamable, e.g. if it is an archive. \r\n\r\n**Describe alternatives you've considered**\r\nAdd a new metadata tag for \"streaming\"\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2653\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2653","id":945102321,"node_id":"MDU6SXNzdWU5NDUxMDIzMjE=","number":2653,"title":"Add SD task for SUPERB","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":5,"closed_issues":2,"state":"open","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-08-04T17:03:52Z","due_on":"2021-08-21T07:00:00Z","closed_at":null},"comments":2,"created_at":"2021-07-15T07:51:40Z","updated_at":"2021-08-04T17:03:52Z","closed_at":"2021-08-04T17:03:52Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Include the SD (Speaker Diarization) task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051) and `s3prl` [instructions](https:\/\/github.com\/s3prl\/s3prl\/tree\/master\/s3prl\/downstream#sd-speaker-diarization).\r\n\r\nSteps:\r\n- [x] Generate the LibriMix corpus\r\n- [x] Prepare the corpus for diarization\r\n- [x] Upload these files to the superb-data repo\r\n- [x] Transcribe the corresponding s3prl processing of these files into our superb loading script\r\n- [ ] README: tags + description sections\r\n\r\nRelated to #2619.\r\n\r\ncc: @lewtun \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2652\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652","id":944865924,"node_id":"MDExOlB1bGxSZXF1ZXN0NjkwMjg0MTI4","number":2652,"title":"Fix logging 
docstring","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-14T23:19:58Z","updated_at":"2021-07-18T11:41:06Z","closed_at":"2021-07-15T09:57:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2652","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2652.patch"},"body":"Remove \"no tqdm bars\" from the docstring in the logging module to align it with the changes introduced in #2534.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2651\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2651","id":944796961,"node_id":"MDU6SXNzdWU5NDQ3OTY5NjE=","number":2651,"title":"Setting log level higher than warning does not suppress progress 
bar","user":{"login":"Isa-rentacs","id":1147443,"node_id":"MDQ6VXNlcjExNDc0NDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1147443?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Isa-rentacs","html_url":"https:\/\/github.com\/Isa-rentacs","followers_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/followers","following_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/orgs","repos_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/repos","events_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Isa-rentacs\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-14T21:06:51Z","updated_at":"2021-07-23T13:03:10Z","closed_at":"2021-07-15T03:41:35Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI would like to disable progress bars for `.map` method (and other methods like `.filter` and `load_dataset` as well).\r\nAccording to #1627 one can suppress it by setting log level higher than `warning`, however doing so doesn't suppress it with version 1.9.0.\r\n\r\nI also tried to set `DATASETS_VERBOSITY` environment variable to `error` or `critical` but it also didn't work.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\nfrom datasets.utils.logging import set_verbosity_error\r\n\r\nset_verbosity_error()\r\n\r\ndef dummy_map(batch):\r\n return batch\r\n\r\ncommon_voice_train = datasets.load_dataset(\"common_voice\", \"de\", split=\"train\")\r\ncommon_voice_test = datasets.load_dataset(\"common_voice\", \"de\", split=\"test\")\r\n\r\ncommon_voice_train.map(dummy_map)\r\n```\r\n\r\n## Expected results\r\n- The progress bar for `.map` call won't be shown\r\n\r\n## Actual results\r\n- The progress bar for `.map` is still shown \r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.4.0-1045-aws-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.5\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2650\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2650","id":944672565,"node_id":"MDU6SXNzdWU5NDQ2NzI1NjU=","number":2650,"title":"[load_dataset] shard and parallelize the 
process","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-14T18:04:58Z","updated_at":"2021-07-14T18:05:16Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"- Some huge datasets take forever to build the first time. (e.g. oscar\/en) as it's done in a single cpu core.\r\n- If the build crashes, everything done up to that point gets lost\r\n\r\nRequest: Shard the build over multiple arrow files, which would enable:\r\n- much faster build by parallelizing the build process\r\n- if the process crashed, the completed arrow files don't need to be re-built again\r\n\r\nThank you!\r\n\r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2649\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2649","id":944651229,"node_id":"MDU6SXNzdWU5NDQ2NTEyMjk=","number":2649,"title":"adding progress bar \/ ETA for 
`load_dataset`","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-14T17:34:39Z","updated_at":"2021-07-14T17:38:00Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Please consider:\r\n```\r\nDownloading and preparing dataset oscar\/unshuffled_deduplicated_en (download: 462.40 GiB, generated: 1.18 TiB, post-processed: Unknown size, total: 1.63 TiB) to cache\/oscar\/unshuffled_deduplicated_en\/1.0.0\/84838bd49d2295f62008383b05620571535451d84545037bb94d6f3501651df2...\r\nHF google storage unreachable. Downloading and preparing it from source\r\n```\r\nand no indication whatsoever of whether things work well or when it'll be done. It's important to have an estimated completion time for when doing slurm jobs since some instances have a cap on run-time.\r\n\r\nI think for this particular job it sat for 30min in total silence and then after 30min it started generating:\r\n```\r\n897850 examples [07:24, 10286.71 examples\/s]\r\n```\r\nwhich is already great!\r\n\r\nRequest: \r\n1. ETA - knowing how many hours to allocate for a slurm job\r\n2. 
progress bar - helps to know things are working and aren't stuck and where we are at.\r\n\r\nThank you!\r\n\r\n@lhoestq \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2648\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2648","id":944484522,"node_id":"MDU6SXNzdWU5NDQ0ODQ1MjI=","number":2648,"title":"Add web_split dataset for Paraphase and Rephrase benchmark","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"assignees":[{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-14T14:24:36Z","updated_at":"2021-07-14T14:26:12Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe:\r\nFor getting simple sentences from complex sentence there are dataset and task like wiki_split that is available in hugging face datasets. This web_split is a very similar dataset. 
There some research paper which states that by combining these two datasets we if we train the model it will yield better results on both tests data.\r\n\r\nThis dataset is made from web NLG data.\r\n\r\nAll the dataset related details are provided in the below repository\r\n\r\nGithub link: https:\/\/github.com\/shashiongithub\/Split-and-Rephrase\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2647\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647","id":944424941,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5OTExMzky","number":2647,"title":"Fix anchor in README","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-14T13:22:44Z","updated_at":"2021-07-18T11:41:18Z","closed_at":"2021-07-15T06:50:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2647","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2647.patch"},"body":"I forgot to push this fix in #2611, so I'm sending it now. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2646\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2646","id":944379954,"node_id":"MDU6SXNzdWU5NDQzNzk5NTQ=","number":2646,"title":"downloading of yahoo_answers_topics dataset failed","user":{"login":"vikrant7k","id":66781249,"node_id":"MDQ6VXNlcjY2NzgxMjQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66781249?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vikrant7k","html_url":"https:\/\/github.com\/vikrant7k","followers_url":"https:\/\/api.github.com\/users\/vikrant7k\/followers","following_url":"https:\/\/api.github.com\/users\/vikrant7k\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vikrant7k\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vikrant7k\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vikrant7k\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vikrant7k\/orgs","repos_url":"https:\/\/api.github.com\/users\/vikrant7k\/repos","events_url":"https:\/\/api.github.com\/users\/vikrant7k\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vikrant7k\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-14T12:31:05Z","updated_at":"2021-07-15T09:15:16Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI get an error datasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files when I try to download the yahoo_answers_topics dataset\r\n\r\n## Steps to reproduce the bug\r\n self.dataset = load_dataset(\r\n 'yahoo_answers_topics', cache_dir=self.config['yahoo_cache_dir'], split='train[:90%]')\r\n# Sample code to reproduce the bug\r\n self.dataset = load_dataset(\r\n 'yahoo_answers_topics', cache_dir=self.config['yahoo_cache_dir'], split='train[:90%]')\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files\r\n\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2645\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2645","id":944374284,"node_id":"MDU6SXNzdWU5NDQzNzQyODQ=","number":2645,"title":"load_dataset processing failed with OS error after downloading a dataset","user":{"login":"fake-warrior8","id":40395156,"node_id":"MDQ6VXNlcjQwMzk1MTU2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40395156?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fake-warrior8","html_url":"https:\/\/github.com\/fake-warrior8","followers_url":"https:\/\/api.github.com\/users\/fake-warrior8\/followers","following_url":"https:\/\/api.github.com\/users\/fake-warrior8\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fake-warrior8\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fake-warrior8\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fake-warrior8\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fake-warrior8\/orgs","repos_url":"https:\/\/api.github.com\/users\/fake-warrior8\/repos","events_url":"https:\/\/api.github.com\/users\/fake-warrior8\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fake-warrior8\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-14T12:23:53Z","updated_at":"2021-07-15T09:34:02Z","closed_at":"2021-07-15T09:34:02Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nAfter downloading a dataset like opus100, there is a bug that \r\nOSError: Cannot find data file.\r\nOriginal error:\r\ndlopen: cannot load any more object with static TLS\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nthis_dataset = load_dataset('opus100', 'af-en')\r\n```\r\n\r\n## Expected results\r\nthere is no error when running load_dataset.\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prep\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 989, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/features.py\", line 952, in encode_example\r\n example = cast_to_python_objects(example)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/features.py\", line 219, in cast_to_python_ob\r\n return _cast_to_python_objects(obj)[0]\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/features.py\", line 165, in _cast_to_python_o\r\n import torch\r\n File 
\"\/home\/anaconda3\/lib\/python3.6\/site-packages\/torch\/__init__.py\", line 188, in \r\n _load_global_deps()\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/torch\/__init__.py\", line 141, in _load_global_deps\r\n ctypes.CDLL(lib_path, mode=ctypes.RTLD_GLOBAL)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/ctypes\/__init__.py\", line 348, in __init__\r\n self._handle = _dlopen(self._name, mode)\r\nOSError: dlopen: cannot load any more object with static TLS\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"download_hub_opus100.py\", line 9, in \r\n this_dataset = load_dataset('opus100', language_pair)\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepa\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/anaconda3\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 658, in _download_and_prep\r\n + str(e)\r\nOSError: Cannot find data file.\r\nOriginal error:\r\ndlopen: cannot load any more object with static TLS\r\n\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-3.13.0-32-generic-x86_64-with-debian-jessie-sid\r\n- Python version: 3.6.6\r\n- PyArrow version: 3.0.0\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2644\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2644","id":944254748,"node_id":"MDU6SXNzdWU5NDQyNTQ3NDg=","number":2644,"title":"Batched `map` not allowed to return 0 items","user":{"login":"pcuenca","id":1177582,"node_id":"MDQ6VXNlcjExNzc1ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1177582?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pcuenca","html_url":"https:\/\/github.com\/pcuenca","followers_url":"https:\/\/api.github.com\/users\/pcuenca\/followers","following_url":"https:\/\/api.github.com\/users\/pcuenca\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pcuenca\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pcuenca\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pcuenca\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pcuenca\/orgs","repos_url":"https:\/\/api.github.com\/users\/pcuenca\/repos","events_url":"https:\/\/api.github.com\/users\/pcuenca\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pcuenca\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-07-14T09:58:19Z","updated_at":"2021-07-26T14:55:15Z","closed_at":"2021-07-26T14:55:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI'm trying to use `map` to filter a large dataset by selecting rows that match an expensive condition (files referenced by one of the columns need to exist in the filesystem, so we have to `stat` them). According to [the documentation](https:\/\/huggingface.co\/docs\/datasets\/processing.html#augmenting-the-dataset), `a batch mapped function can take as input a batch of size N and return a batch of size M where M can be greater or less than N and can even be zero`.\r\n\r\nHowever, when the returned batch has a size of zero (neither item in the batch fulfilled the condition), we get an `index out of bounds` error. I think that `arrow_writer.py` is [trying to infer the returned types using the first element returned](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/arrow_writer.py#L100), but no elements were returned in this case.\r\n\r\nFor this error to happen, I'm returning a dictionary that contains empty lists for the keys I want to keep, see below. If I return an empty dictionary instead (no keys), then a different error eventually occurs.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndef select_rows(examples):\r\n # `key` is a column name that exists in the original dataset\r\n # The following line simulates no matches found, so we return an empty batch\r\n result = {'key': []}\r\n return result\r\n\r\nfiltered_dataset = dataset.map(\r\n select_rows,\r\n remove_columns = dataset.column_names,\r\n batched = True,\r\n num_proc = 1,\r\n desc = \"Selecting rows with images that exist\"\r\n)\r\n```\r\n\r\nThe code above immediately triggers the exception. If we use the following instead:\r\n\r\n```python\r\ndef select_rows(examples):\r\n # `key` is a column name that exists in the original dataset\r\n result = {'key': []} # or defaultdict or whatever\r\n \r\n # code to check for condition and append elements to result\r\n # some_items_found will be set to True if there were any matching elements in the batch\r\n \r\n return result if some_items_found else {}\r\n```\r\n\r\nThen it _seems_ to work, but it eventually fails with some sort of schema error. I believe it may happen when an empty batch is followed by a non-empty one, but haven't set up a test to verify it.\r\n\r\nIn my opinion, returning a dictionary with empty lists and valid column names should be accepted as a valid result with zero items.\r\n\r\n## Expected results\r\nThe dataset would be filtered and only the matching fields would be returned.\r\n\r\n## Actual results\r\nAn exception is encountered, as described. 
Using a workaround makes it fail further along the line.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.1.dev0\r\n- Platform: Linux-5.4.0-53-generic-x86_64-with-glibc2.17\r\n- Python version: 3.8.10\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2643\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2643","id":944220273,"node_id":"MDU6SXNzdWU5NDQyMjAyNzM=","number":2643,"title":"Enum used in map functions will raise a RecursionError with dill.","user":{"login":"jorgeecardona","id":100702,"node_id":"MDQ6VXNlcjEwMDcwMg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/100702?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jorgeecardona","html_url":"https:\/\/github.com\/jorgeecardona","followers_url":"https:\/\/api.github.com\/users\/jorgeecardona\/followers","following_url":"https:\/\/api.github.com\/users\/jorgeecardona\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jorgeecardona\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jorgeecardona\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jorgeecardona\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jorgeecardona\/orgs","repos_url":"https:\/\/api.github.com\/users\/jorgeecardona\/repos","events_url":"https:\/\/api.github.com\/users\/jorgeecardona\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jorgeecardona\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-14T09:16:08Z","updated_at":"2021-08-09T21:50:13Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nEnums used in functions pass to `map` will fail at pickling with a maximum recursion exception as described here: https:\/\/github.com\/uqfoundation\/dill\/issues\/250#issuecomment-852566284\r\n\r\nIn my particular case, I use an enum to define an argument with fixed options using the `TraininigArguments` dataclass as base class and the `HfArgumentParser`. In the same file I use a `ds.map` that tries to pickle the content of the module including the definition of the enum that runs into the dill bug described above.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nfrom enum import Enum\r\n\r\nclass A(Enum):\r\n a = 'a'\r\n\r\ndef main():\r\n a = A.a\r\n \r\n def f(x):\r\n return {} if a == a.a else x\r\n \r\n ds = load_dataset('cnn_dailymail', '3.0.0')['test']\r\n ds = ds.map(f, num_proc=15)\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n```\r\n\r\n## Expected results\r\nThe known problem with dill could be prevented as explained in the link above (workaround.) 
Since `HFArgumentParser` nicely uses the enum class for choices it makes sense to also deal with this bug under the hood.\r\n\r\n## Actual results\r\n\r\n```python\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/site-packages\/dill\/_dill.py\", line 1373, in save_type\r\n pickler.save_reduce(_create_type, (type(obj), obj.__name__,\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 690, in save_reduce\r\n save(args)\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 558, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 899, in save_tuple\r\n save(element)\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 534, in save\r\n self.framer.commit_frame()\r\n File \"\/home\/xxxx\/miniconda3\/lib\/python3.8\/pickle.py\", line 220, in commit_frame\r\n if f.tell() >= self._FRAME_SIZE_TARGET or force:\r\nRecursionError: maximum recursion depth exceeded while calling a Python object\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.9.0-4-amd64-x86_64-with-glibc2.10\r\n- Python version: 3.8.5\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2642\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2642","id":944175697,"node_id":"MDU6SXNzdWU5NDQxNzU2OTc=","number":2642,"title":"Support multi-worker with streaming dataset (IterableDataset).","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-14T08:22:58Z","updated_at":"2021-07-15T09:37:34Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nThe current `.map` does not support multi-process, CPU can become bottleneck if the pre-processing is complex (e.g. 
t5 span masking).\r\n\r\n**Describe the solution you'd like**\r\nIdeally `.map` should support multi-worker like tfds, with `AUTOTUNE`.\r\n\r\n**Describe alternatives you've considered**\r\nA simpler solution is to shard the dataset and process it in parallel with pytorch dataloader. The shard does not need to be of equal size.\r\n* https:\/\/pytorch.org\/docs\/stable\/data.html#torch.utils.data.IterableDataset\r\n\r\n**Additional context**\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2641\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2641","id":943838085,"node_id":"MDU6SXNzdWU5NDM4MzgwODU=","number":2641,"title":"load_dataset(\"financial_phrasebank\") NonMatchingChecksumError","user":{"login":"courtmckay","id":13956255,"node_id":"MDQ6VXNlcjEzOTU2MjU1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13956255?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/courtmckay","html_url":"https:\/\/github.com\/courtmckay","followers_url":"https:\/\/api.github.com\/users\/courtmckay\/followers","following_url":"https:\/\/api.github.com\/users\/courtmckay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/courtmckay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/courtmckay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/courtmckay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/courtmckay\/orgs","repos_url":"https:\/\/api.github.com\/users\/courtmckay\/repos","events_url":"https:\/\/api.github.com\/users\/courtmckay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/courtmckay\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-13T21:21:49Z","updated_at":"2021-07-19T13:26:10Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nAttempting to download the financial_phrasebank dataset results in a NonMatchingChecksumError\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"financial_phrasebank\", 'sentences_allagree')\r\n```\r\n\r\n## Expected results\r\nI expect to see the financial_phrasebank dataset downloaded successfully\r\n\r\n## Actual results\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/www.researchgate.net\/profile\/Pekka_Malo\/publication\/251231364_FinancialPhraseBank-v10\/data\/0c96051eee4fb1d56e000000\/FinancialPhraseBank-v10.zip']\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-4.14.232-177.418.amzn2.x86_64-x86_64-with-debian-10.6\r\n- Python version: 3.7.10\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2640\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640","id":943591055,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5MjAxMDkw","number":2640,"title":"Fix docstrings","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-13T16:09:14Z","updated_at":"2021-07-15T06:51:01Z","closed_at":"2021-07-15T06:06:12Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2640","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2640.patch"},"body":"Fix rendering of some docstrings.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2639\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639","id":943527463,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5MTQ3NDE5","number":2639,"title":"Refactor patching to specific 
submodule","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-13T15:08:45Z","updated_at":"2021-07-13T16:52:49Z","closed_at":"2021-07-13T16:52:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2639","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2639.patch"},"body":"Minor reorganization of the code, so that additional patching functions (not related to streaming) might be created.\r\n\r\nIn relation with the initial approach followed in #2631.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2638\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638","id":943484913,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg5MTA5NTg1","number":2638,"title":"Streaming for the Json 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-13T14:37:06Z","updated_at":"2021-07-16T15:59:32Z","closed_at":"2021-07-16T15:59:31Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2638","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2638.patch"},"body":"It was not using `open` in the builder. Therefore `pyarrow.json.read_json` was downloading the full file to start yielding rows.\r\n\r\nMoreover, it appeared that `pyarrow.json.read_json` was not really suited for streaming as it was downloading too much data and failing if `block_size` was not properly configured (related to #2573).\r\n\r\nSo I switched to using `open` which is extended to support reading from remote file progressively, and I removed the pyarrow json reader which was not practical.\r\nInstead, I'm using the classical `json.loads` from the standard library.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2637\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2637","id":943290736,"node_id":"MDU6SXNzdWU5NDMyOTA3MzY=","number":2637,"title":"Add the CIDEr 
metric?","user":{"login":"zuujhyt","id":75845952,"node_id":"MDQ6VXNlcjc1ODQ1OTUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75845952?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zuujhyt","html_url":"https:\/\/github.com\/zuujhyt","followers_url":"https:\/\/api.github.com\/users\/zuujhyt\/followers","following_url":"https:\/\/api.github.com\/users\/zuujhyt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zuujhyt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zuujhyt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zuujhyt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zuujhyt\/orgs","repos_url":"https:\/\/api.github.com\/users\/zuujhyt\/repos","events_url":"https:\/\/api.github.com\/users\/zuujhyt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zuujhyt\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-13T12:22:51Z","updated_at":"2021-07-13T12:22:51Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\nI find the api in https:\/\/huggingface.co\/metrics quite useful.\r\nI am playing around with video\/image captioning task, where CIDEr is a popular metric.\r\nDo you plan to add this into the HF ```datasets``` library?\r\nThanks.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2636\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636","id":943044514,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4NzEyMTY4","number":2636,"title":"Streaming for the Pandas 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-13T09:18:21Z","updated_at":"2021-07-13T14:37:24Z","closed_at":"2021-07-13T14:37:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2636","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2636.patch"},"body":"It was not using open in the builder. Therefore pd.read_pickle could fail when streaming from a private repo for example.\r\n\r\nIndeed, when streaming, open is extended to support reading from remote files and handles authentication to the HF Hub","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2635\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635","id":943030999,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4Njk5OTM5","number":2635,"title":"Streaming for the CSV 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-13T09:08:58Z","updated_at":"2021-07-13T15:19:38Z","closed_at":"2021-07-13T15:19:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2635","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2635.patch"},"body":"It was not using `open` in the builder. Therefore `pd.read_csv` was downloading the full file to start yielding rows.\r\n\r\nIndeed, when streaming, `open` is extended to support reading from remote file progressively.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2634\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634","id":942805621,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4NDk2Mzc2","number":2634,"title":"Inject ASR template for lj_speech 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-13T06:04:54Z","updated_at":"2021-07-13T09:05:09Z","closed_at":"2021-07-13T09:05:09Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2634","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2634.patch"},"body":"Related to: #2565, #2633.\r\n\r\ncc: @lewtun ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2633\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633","id":942396414,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4MTMwOTA5","number":2633,"title":"Update ASR tags","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-12T19:58:31Z","updated_at":"2021-07-13T05:45:26Z","closed_at":"2021-07-13T05:45:13Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2633","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2633.patch"},"body":"This PR updates the ASR tags of the 5 datasets added in #2565 following the change of task categories in #2620 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2632\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632","id":942293727,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg4MDQyMjcw","number":2632,"title":"add image-classification task 
template","user":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-12T17:41:03Z","updated_at":"2021-07-13T15:44:28Z","closed_at":"2021-07-13T15:28:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2632","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2632.patch"},"body":"Snippet below is the tl;dr, but you can try it out directly here:\r\n\r\n[![Open In Collab](https:\/\/colab.research.google.com\/assets\/colab-badge.svg)](https:\/\/colab.research.google.com\/gist\/nateraw\/005c025d41f0e48ae3d4ee61c0f20b70\/image-classification-task-template-demo.ipynb)\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nds = load_dataset('nateraw\/image-folder', data_files='PetImages\/')\r\n# DatasetDict({\r\n# train: Dataset({\r\n# features: ['file', 'labels'],\r\n# num_rows: 23410\r\n# })\r\n# })\r\n\r\nds = ds.prepare_for_task('image-classification')\r\n# DatasetDict({\r\n# train: Dataset({\r\n# features: ['image_file_path', 'labels'],\r\n# num_rows: 23410\r\n# })\r\n# })\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2631\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631","id":942242271,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3OTk3MzM2","number":2631,"title":"Delete extracted files when loading 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":13,"created_at":"2021-07-12T16:39:33Z","updated_at":"2021-07-19T09:08:19Z","closed_at":"2021-07-19T09:08:19Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2631","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2631.patch"},"body":"Close #2481, close #2604, close #2591.\r\n\r\ncc: @stas00, @thomwolf, @BirgerMoell ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2630\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2630","id":942102956,"node_id":"MDU6SXNzdWU5NDIxMDI5NTY=","number":2630,"title":"Progress bars are not properly rendered in Jupyter 
notebook","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-12T14:07:13Z","updated_at":"2021-07-13T07:20:32Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe progress bars are not Jupyter widgets; regular progress bars appear (like in a terminal).\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nds.map(tokenize, num_proc=10)\r\n```\r\n\r\n## Expected results\r\nJupyter widgets displaying the progress bars.\r\n\r\n## Actual results\r\nSimple plane progress bars.\r\n\r\ncc: Reported by @thomwolf ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2629\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2629","id":941819205,"node_id":"MDU6SXNzdWU5NDE4MTkyMDU=","number":2629,"title":"Load datasets from the Hub without requiring a dataset 
script","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-12T08:45:17Z","updated_at":"2021-07-12T15:21:04Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As a user I would like to be able to upload my csv\/json\/text\/parquet\/etc. 
files in a dataset repository on the Hugging Face Hub and be able to load this dataset with `load_dataset` without having to implement a dataset script.\r\n\r\nMoreover I would like to be able to specify which file goes into which split using the `data_files` argument.\r\n\r\nThis feature should be compatible with private repositories and dataset streaming.\r\n\r\nThis can be implemented by checking the extension of the files in the dataset repository and then by using the right dataset builder that is already packaged in the library (csv\/json\/text\/parquet\/etc.)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2628\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628","id":941676404,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3NTE0NzQz","number":2628,"title":"Use ETag of remote data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-12T05:10:10Z","updated_at":"2021-07-12T14:08:34Z","closed_at":"2021-07-12T08:40:07Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2628","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2628.patch"},"body":"Use ETag of remote data files to create config ID.\r\n\r\nRelated to #2616.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2627\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627","id":941503349,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3MzczMDg1","number":2627,"title":"Minor fix tests with Windows 
paths","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-11T17:55:48Z","updated_at":"2021-07-12T14:08:47Z","closed_at":"2021-07-12T08:34:50Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2627","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2627.patch"},"body":"Minor fix tests with Windows paths.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2626\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626","id":941497830,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3MzY4OTMz","number":2626,"title":"Use correct logger in metrics.py","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-11T17:22:30Z","updated_at":"2021-07-12T14:08:54Z","closed_at":"2021-07-12T05:54:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2626","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2626.patch"},"body":"Fixes #2624 ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2625\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2625","id":941439922,"node_id":"MDU6SXNzdWU5NDE0Mzk5MjI=","number":2625,"title":"\u269b\ufe0f\ud83d\ude07\u2699\ufe0f\ud83d\udd11","user":{"login":"hustlen0mics","id":50596661,"node_id":"MDQ6VXNlcjUwNTk2NjYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50596661?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hustlen0mics","html_url":"https:\/\/github.com\/hustlen0mics","followers_url":"https:\/\/api.github.com\/users\/hustlen0mics\/followers","following_url":"https:\/\/api.github.com\/users\/hustlen0mics\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hustlen0mics\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hustlen0mics\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hustlen0mics\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hustlen0mics\/orgs","repos_url":"https:\/\/api.github.com\/users\/hustlen0mics\/repos","events_url":"https:\/\/api.github.com\/users\/hustlen0mics\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hustlen0mics\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-11T12:14:34Z","updated_at":"2021-07-12T05:55:59Z","closed_at":"2021-07-12T05:55:59Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2624\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2624","id":941318247,"node_id":"MDU6SXNzdWU5NDEzMTgyNDc=","number":2624,"title":"can't set verbosity for 
`metric.py`","user":{"login":"thomas-happify","id":66082334,"node_id":"MDQ6VXNlcjY2MDgyMzM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/66082334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomas-happify","html_url":"https:\/\/github.com\/thomas-happify","followers_url":"https:\/\/api.github.com\/users\/thomas-happify\/followers","following_url":"https:\/\/api.github.com\/users\/thomas-happify\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomas-happify\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomas-happify\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomas-happify\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomas-happify\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomas-happify\/repos","events_url":"https:\/\/api.github.com\/users\/thomas-happify\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomas-happify\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-10T20:23:45Z","updated_at":"2021-07-12T05:54:29Z","closed_at":"2021-07-12T05:54:29Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n```\r\n[2021-07-10 20:13:11,528][datasets.utils.filelock][INFO] - Lock 139705371374976 acquired on \/root\/.cache\/huggingface\/metrics\/seqeval\/default\/default_experiment-1-0.arrow.lock\r\n[2021-07-10 20:13:11,529][datasets.arrow_writer][INFO] - Done writing 32 examples in 6100 bytes \/root\/.cache\/huggingface\/metrics\/seqeval\/default\/default_experiment-1-0.arrow.\r\n[2021-07-10 20:13:11,531][datasets.arrow_dataset][INFO] - Set __getitem__(key) output type to python objects for no columns (when key is int or slice) and don't output other (un-formatted) columns.\r\n[2021-07-10 20:13:11,543][\/conda\/envs\/myenv\/lib\/python3.8\/site-packages\/datasets\/metric.py][INFO] - Removing \/root\/.cache\/huggingface\/metrics\/seqeval\/default\/default_experiment-1-0.arrow\r\n```\r\nAs you can see, `datasets` logging come from different places. \r\n`filelock`, `arrow_writer` & `arrow_dataset` comes from `datasets.*` which are expected \r\nHowever, `metric.py` logging comes from `\/conda\/envs\/myenv\/lib\/python3.8\/site-packages\/datasets\/`\r\n\r\nSo when setting `datasets.utils.logging.set_verbosity_error()`, it still logs the last message which is annoying during evaluation. 
\r\n\r\nI had to do \r\n```\r\nlogging.getLogger(\"\/conda\/envs\/myenv\/lib\/python3.8\/site-packages\/datasets\/metric\").setLevel(logging.ERROR)\r\n``` \r\nto fully mute these messages\r\n\r\n## Expected results\r\nit shouldn't log these messages when setting `datasets.utils.logging.set_verbosity_error()`\r\n\r\n## Environment info\r\n\r\n- `datasets` version: tried both 1.8.0 & 1.9.0\r\n- Platform: Ubuntu 18.04.5 LTS \r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2623\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623","id":941265342,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg3MTk0MjM3","number":2623,"title":"[Metrics] added wiki_split metrics","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"assignees":[{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvo
nplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-10T14:51:50Z","updated_at":"2021-07-14T14:28:13Z","closed_at":"2021-07-12T22:34:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2623","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2623.patch"},"body":"Fixes: #2606\r\n\r\nThis pull request adds combine metrics for the wikisplit or English sentence split task\r\n\r\nReviewer: @patrickvonplaten ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2622\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2622","id":941127785,"node_id":"MDU6SXNzdWU5NDExMjc3ODU=","number":2622,"title":"Integration with AugLy","user":{"login":"Darktex","id":890615,"node_id":"MDQ6VXNlcjg5MDYxNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/890615?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Darktex","html_url":"https:\/\/github.com\/Darktex","followers_url":"https:\/\/api.github.com\/users\/Darktex\/followers","following_url":"https:\/\/api.github.com\/users\/Darktex\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Darktex\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Darktex\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Darktex\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Darktex\/orgs","repos_url":"https:\/\/api.github.com\/users\/Darktex\/repos","events_url":"https:\/\/api.github.com\/users\/Darktex\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Darktex\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-10T00:03:09Z","updated_at":"2021-07-11T17:08:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nFacebook recently launched a library, [AugLy](https:\/\/github.com\/facebookresearch\/AugLy) , that has a unified API for augmentations for image, video and text.\r\n\r\nIt would be pretty exciting to have it hooked up to HF libraries so that we can make NLP models robust to misspellings or to punctuation, or emojis etc. Plus, with Transformers supporting more CV use cases, having augmentations support becomes crucial.\r\n\r\n**Describe the solution you'd like**\r\nThe biggest difference between augmentations and preprocessing is that preprocessing happens only once, but you are running augmentations once per epoch. AugLy operates on text directly, so this breaks the typical workflow where we would run the tokenizer once, set format to pt tensors and be ready for the Dataloader.\r\n\r\n**Describe alternatives you've considered**\r\n\r\nOne possible way of implementing these is to make a custom Dataset class where getitem(i) runs the augmentation and the tokenizer every time, though this would slow training down considerably given we wouldn't even run the tokenizer in batches.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2621\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621","id":940916446,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2OTE1Mzcw","number":2621,"title":"Use prefix to allow exceed Windows 
MAX_PATH","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-07-09T16:39:53Z","updated_at":"2021-07-16T15:28:12Z","closed_at":"2021-07-16T15:28:11Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2621","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2621.patch"},"body":"By using this prefix, you can exceed the Windows MAX_PATH limit.\r\n\r\nSee: https:\/\/docs.microsoft.com\/en-us\/windows\/win32\/fileio\/naming-a-file?redirectedfrom=MSDN#win32-file-namespaces\r\n\r\nRelated to #2524, #2220.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2620\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620","id":940893389,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODk3MDky","number":2620,"title":"Add speech processing 
tasks","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-09T16:07:29Z","updated_at":"2021-07-12T18:32:59Z","closed_at":"2021-07-12T17:32:02Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2620","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2620.patch"},"body":"This PR replaces the `automatic-speech-recognition` task category with a broader `speech-processing` category. \r\n\r\nThe tasks associated with this category are derived from the [SUPERB benchmark](https:\/\/arxiv.org\/abs\/2105.01051), and ASR is included in this set.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2619\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619","id":940858236,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODY3NDA4","number":2619,"title":"Add ASR task for 
SUPERB","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":3,"created_at":"2021-07-09T15:19:45Z","updated_at":"2021-07-15T08:55:58Z","closed_at":"2021-07-13T12:40:18Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2619","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2619.patch"},"body":"This PR starts building up the SUPERB benchmark by including the ASR task as described in the [SUPERB paper](https:\/\/arxiv.org\/abs\/2105.01051) and `s3prl` [instructions](https:\/\/github.com\/s3prl\/s3prl\/tree\/v0.2.0\/downstream#asr-automatic-speech-recognition).\r\n\r\nUsage:\r\n\r\n```python\r\nfrom datasets import load_dataset \r\n\r\nasr = load_dataset(\"superb\", \"asr\")\r\n# 
DatasetDict({\r\n# train: Dataset({\r\n# features: ['file', 'text', 'speaker_id', 'chapter_id', 'id'],\r\n# num_rows: 28539\r\n# })\r\n# validation: Dataset({\r\n# features: ['file', 'text', 'speaker_id', 'chapter_id', 'id'],\r\n# num_rows: 2703\r\n# })\r\n# test: Dataset({\r\n# features: ['file', 'text', 'speaker_id', 'chapter_id', 'id'],\r\n# num_rows: 2620\r\n# })\r\n# })\r\n```\r\n\r\nI've used the GLUE benchmark as a guide for filling out the README.\r\n\r\nTo move fast during the evaluation PoC I propose to merge one task at a time, so we can continue building the training \/ evaluation framework in parallel.\r\n\r\nNote: codewise this PR is ready for review - I'll add the missing YAML tags once #2620 is merged :)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2618\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2618","id":940852640,"node_id":"MDU6SXNzdWU5NDA4NTI2NDA=","number":2618,"title":"`filelock.py` Error","user":{"login":"liyucheng09","id":27999909,"node_id":"MDQ6VXNlcjI3OTk5OTA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27999909?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/liyucheng09","html_url":"https:\/\/github.com\/liyucheng09","followers_url":"https:\/\/api.github.com\/users\/liyucheng09\/followers","following_url":"https:\/\/api.github.com\/users\/liyucheng09\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/liyucheng09\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/liyucheng09\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/liyucheng09\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/liyucheng09\/orgs","repos_url":"https:\/\/api.github.com\/users\/liyucheng09\/repos","events_url":"https:\/\/api.github.com\/users\/liyucheng09\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/liyucheng09\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-09T15:12:49Z","updated_at":"2021-07-12T06:20:30Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nIt seems that the `filelock.py` went error. 
\r\n\r\n```\r\n>>> ds=load_dataset('xsum')\r\n\r\n^CTraceback (most recent call last):\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 402, in _acquire\r\n fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\r\nOSError: [Errno 37] No locks available\r\n```\r\n\r\nAccording to error log, it is OSError, but there is an `except` in the `_acquire` function.\r\n\r\n```\r\n def _acquire(self):\r\n open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC\r\n try:\r\n fd = os.open(self._lock_file, open_mode)\r\n except (IOError, OSError):\r\n pass\r\n else:\r\n self._lock_file_fd = fd\r\n return None\r\n```\r\n\r\nI don't know why it stucked rather than `pass` directly.\r\n\r\nI am not quite familiar with filelock operation, so any help is highly appriciated.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n\r\nds = load_dataset('xsum')\r\n```\r\n\r\n## Expected results\r\nA clear and concise description of the expected results.\r\n\r\n## Actual results\r\n```\r\n>>> ds=load_dataset('xsum')\r\n\r\n^CTraceback (most recent call last):\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 402, in _acquire\r\n fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\r\nOSError: [Errno 37] No locks available\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 818, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 470, in prepare_module\r\n with FileLock(lock_path):\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 323, in __enter__\r\n self.acquire()\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 272, in acquire\r\n self._acquire()\r\n File \"\/user\/HS502\/yl02706\/.conda\/envs\/lyc\/lib\/python3.6\/site-packages\/datasets\/utils\/filelock.py\", line 402, in _acquire\r\n fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)\r\nKeyboardInterrupt\r\n```\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-4.15.0-135-generic-x86_64-with-debian-buster-sid\r\n- Python version: 3.6.13\r\n- PyArrow version: 4.0.1\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2617\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617","id":940846847,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODU3NzQz","number":2617,"title":"Fix missing EOL issue in to_json for old versions of 
pandas","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-09T15:05:45Z","updated_at":"2021-07-12T14:09:00Z","closed_at":"2021-07-09T15:28:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2617","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2617.patch"},"body":"Some versions of pandas don't add an EOL at the end of the output of `to_json`.\r\nTherefore users could end up having two samples in the same line\r\n\r\nClose https:\/\/github.com\/huggingface\/datasets\/issues\/2615","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2616\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616","id":940799038,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2ODE3NjYz","number":2616,"title":"Support remote data files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":2,"created_at":"2021-07-09T14:07:38Z","updated_at":"2021-07-09T16:13:41Z","closed_at":"2021-07-09T16:13:41Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2616","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2616.patch"},"body":"Add support for (streaming) remote data files:\r\n\r\n```python\r\ndata_files = f\"https:\/\/huggingface.co\/datasets\/{repo_id}\/resolve\/main\/{relative_file_path}\"\r\nds = load_dataset(\"json\", split=\"train\", data_files=data_files, streaming=True)\r\n```\r\n\r\ncc: @thomwolf ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2615\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2615","id":940794339,"node_id":"MDU6SXNzdWU5NDA3OTQzMzk=","number":2615,"title":"Jsonlines export 
error","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id"
:"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":10,"created_at":"2021-07-09T14:02:05Z","updated_at":"2021-07-09T15:29:07Z","closed_at":"2021-07-09T15:28:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen exporting large datasets in jsonlines (c4 in my case) the created file has an error every 9999 lines: the 9999th and 10000th are concatenated, thus breaking the jsonlines format. This sounds like it is related to batching, which is by 10000 by default\r\n\r\n## Steps to reproduce the bug\r\nThis what I'm running:\r\n\r\nin python:\r\n\r\n```\r\nfrom datasets import load_dataset\r\nptb = load_dataset(\"ptb_text_only\")\r\nptb[\"train\"].to_json(\"ptb.jsonl\")\r\n```\r\n\r\nthen out of python:\r\n\r\n```\r\nhead -10000 ptb.jsonl\r\n```\r\n\r\n## Expected results\r\nProperly separated lines\r\n\r\n## Actual results\r\nThe last line is a concatenation of two lines\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.9.1.dev0\r\n- Platform: Linux-5.4.0-1046-gcp-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.6.9\r\n- PyArrow version: 4.0.1","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2614\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614","id":940762427,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2Nzg2NTg3","number":2614,"title":"Convert numpy scalar to python float in Pearsonr 
output","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-09T13:22:55Z","updated_at":"2021-07-12T14:13:02Z","closed_at":"2021-07-09T14:04:38Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2614","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2614.patch"},"body":"Following of https:\/\/github.com\/huggingface\/datasets\/pull\/2612","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2613\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613","id":940759852,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2Nzg0MzY0","number":2613,"title":"Use ndarray.item instead of ndarray.tolist","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-09T13:19:35Z","updated_at":"2021-07-12T14:12:57Z","closed_at":"2021-07-09T13:50:05Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2613","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2613.patch"},"body":"This PR follows up on #2612 to use `numpy.ndarray.item` instead of `numpy.ndarray.tolist` as the latter is somewhat confusing to the developer (even though it works).\r\n\r\nJudging from the `numpy` docs, `ndarray.item` is closer to what we want: https:\/\/numpy.org\/doc\/stable\/reference\/generated\/numpy.ndarray.item.html#numpy-ndarray-item\r\n\r\nPS. Sorry for the duplicate work here. 
I should have read the numpy docs more carefully in #2612 \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2612\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612","id":940604512,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2NjUwMjk3","number":2612,"title":"Return Python float instead of numpy.float64 in sklearn metrics","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":3,"created_at":"2021-07-09T09:48:09Z","updated_at":"2021-07-12T14:12:53Z","closed_at":"2021-07-09T13:03:54Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2612","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2612.patch"},"body":"This PR converts the return type of all `sklearn` metrics to be Python `float` instead of `numpy.float64`.\r\n\r\nThe reason behind this is that our Hub evaluation framework relies on converting benchmark-specific metrics to YAML ([example](https:\/\/huggingface.co\/datasets\/autonlp\/autonlp-benchmark-raft-neelalex__raft-test-neelalex__raft-predictions-3\/blob\/main\/README.md#L11)) and the `numpy.float64` format produces garbage like:\r\n\r\n```python\r\nimport yaml\r\nfrom datasets import load_metric\r\n\r\nmetric = load_metric(\"accuracy\")\r\nscore = metric.compute(predictions=[0,1], references=[0,1])\r\nprint(yaml.dump(score[\"accuracy\"])) # output below\r\n# !!python\/object\/apply:numpy.core.multiarray.scalar\r\n# - !!python\/object\/apply:numpy.dtype\r\n# args:\r\n# - f8\r\n# - false\r\n# - true\r\n# state: !!python\/tuple\r\n# - 3\r\n# - <\r\n# - null\r\n# - null\r\n# - null\r\n# - -1\r\n# - -1\r\n# - 0\r\n# - !!binary |\r\n# AAAAAAAA8D8=\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2611\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611","id":940307053,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2Mzk5MjU3","number":2611,"title":"More consistent 
naming","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-09T00:09:17Z","updated_at":"2021-07-13T17:13:19Z","closed_at":"2021-07-13T16:08:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2611","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2611.patch"},"body":"As per @stas00's suggestion in #2500, this PR inserts a space between the logo and the lib name (`\ud83e\udd17Datasets` -> `\ud83e\udd17 Datasets`) for consistency with the Transformers lib. 
Additionally, more consistent names are used for Datasets Hub, etc.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2610\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610","id":939899829,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg2MDUwMzI5","number":2610,"title":"Add missing WikiANN language tags","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-08T14:08:01Z","updated_at":"2021-07-12T14:12:16Z","closed_at":"2021-07-08T15:44:04Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2610","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2610.patch"},"body":"Add missing language tags for WikiANN datasets.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2609\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609","id":939616682,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg1ODA3MTMz","number":2609,"title":"Fix potential 
DuplicatedKeysError","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":1,"created_at":"2021-07-08T08:38:04Z","updated_at":"2021-07-12T14:13:16Z","closed_at":"2021-07-09T16:42:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2609","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2609.patch"},"body":"Fix potential DiplicatedKeysError by ensuring keys are unique.\r\n\r\nWe should promote as a good practice, that the keys should be programmatically generated as unique, instead of read from data (which might be not unique).","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2608\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608","id":938897626,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg1MjAwMDYw","number":2608,"title":"Support streaming JSON files ","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T13:30:22Z","updated_at":"2021-07-12T14:12:31Z","closed_at":"2021-07-08T16:08:41Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2608","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2608.patch"},"body":"Use open in JSON dataset builder, so that it can be patched with xopen for streaming.\r\n\r\nClose #2607.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2607\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2607","id":938796902,"node_id":"MDU6SXNzdWU5Mzg3OTY5MDI=","number":2607,"title":"Streaming local gzip compressed JSON line files is not 
working","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-07-07T11:36:33Z","updated_at":"2021-07-20T09:50:19Z","closed_at":"2021-07-08T16:08:41Z","author_association":"MEMBER","active_lock_reason
":null,"pull_request":null,"body":"## Describe the bug\r\nUsing streaming to iterate on local gzip compressed JSON files raise a file not exist error\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nstreamed_dataset = load_dataset('json', split='train', data_files=data_files, streaming=True)\r\n\r\nnext(iter(streamed_dataset))\r\n```\r\n\r\n## Actual results\r\n```\r\nFileNotFoundError Traceback (most recent call last)\r\n in \r\n----> 1 next(iter(streamed_dataset))\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in __iter__(self)\r\n 336 \r\n 337 def __iter__(self):\r\n--> 338 for key, example in self._iter():\r\n 339 if self.features:\r\n 340 # we encode the example for ClassLabel feature types for example\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in _iter(self)\r\n 333 else:\r\n 334 ex_iterable = self._ex_iterable\r\n--> 335 yield from ex_iterable\r\n 336 \r\n 337 def __iter__(self):\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in __iter__(self)\r\n 76 \r\n 77 def __iter__(self):\r\n---> 78 for key, example in self.generate_examples_fn(**self.kwargs):\r\n 79 yield key, example\r\n 80 \r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/iterable_dataset.py in wrapper(**kwargs)\r\n 282 def wrapper(**kwargs):\r\n 283 python_formatter = PythonFormatter()\r\n--> 284 for key, table in generate_tables_fn(**kwargs):\r\n 285 batch = python_formatter.format_batch(table)\r\n 286 for i, example in enumerate(_batch_to_examples(batch)):\r\n\r\n~\/Documents\/GitHub\/datasets\/src\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files, original_files)\r\n 85 file,\r\n 86 read_options=self.config.pa_read_options,\r\n---> 87 parse_options=self.config.pa_parse_options,\r\n 88 )\r\n 89 except pa.ArrowInvalid as err:\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/_json.pyx in pyarrow._json.read_json()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/_json.pyx in pyarrow._json._get_reader()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.get_input_stream()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.get_native_file()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.OSFile.__cinit__()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/io.pxi in pyarrow.lib.OSFile._open_readable()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/miniconda2\/envs\/datasets\/lib\/python3.7\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nFileNotFoundError: [Errno 2] Failed to open local file 'gzip:\/\/file-000000000000.json::\/Users\/thomwolf\/github-dataset\/file-000000000000.json.gz'. 
Detail: [errno 2] No such file or directory\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.9.1.dev0\r\n- Platform: Darwin-19.6.0-x86_64-i386-64bit\r\n- Python version: 3.7.7\r\n- PyArrow version: 1.0.0","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2606\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2606","id":938763684,"node_id":"MDU6SXNzdWU5Mzg3NjM2ODQ=","number":2606,"title":"[Metrics] addition of wiki_split metrics","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2459308248,"node_id":"MDU6TGFiZWwyNDU5MzA4MjQ4","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20request","name":"metric request","color":"d4c5f9","default":false,"description":"Requesting to add a new 
metric"}],"state":"closed","locked":false,"assignee":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"assignees":[{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-07T10:56:04Z","updated_at":"2021-07-12T22:34:31Z","closed_at":"2021-07-12T22:34:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nWhile training the model on sentence split the task in English we require to evaluate the trained model on `Exact Match`, `SARI` and `BLEU` score\r\nlike this \r\n![image](https:\/\/user-images.githubusercontent.com\/26653468\/124746876-ff5a3380-df3e-11eb-9a01-4b48db7a6694.png)\r\nWhile training we require metrics which can give all the output\r\n\r\nCurrently, we don't have an exact match for text normalized data\r\n\r\n**Describe the solution you'd like**\r\nA custom metrics for wiki_split that can calculate these three values and provide it in the form of a single dictionary\r\nFor exact match, we can refer to [this](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/src\/transformers\/data\/metrics\/squad_metrics.py) \r\n\r\n**Describe alternatives you've considered**\r\nTwo metrics are already present one more can be added for an exact match then we can run all three metrics in training script\r\n\r\n#self-assign","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2605\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605","id":938648164,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0OTkyODIz","number":2605,"title":"Make any ClientError trigger retry in streaming mode (e.g. ClientOSError)","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T08:47:23Z","updated_at":"2021-07-12T14:10:27Z","closed_at":"2021-07-07T08:59:13Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2605","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2605.patch"},"body":"During the FLAX sprint some users have this error when streaming datasets:\r\n```python\r\naiohttp.client_exceptions.ClientOSError: [Errno 104] Connection reset by peer\r\n```\r\nThis error must trigger a retry instead of directly crashing\r\n\r\nTherefore I extended the error type that triggers the retry to be the base aiohttp error type: `ClientError`\r\nIn particular both `ClientOSError` and `ServerDisconnectedError` inherit from `ClientError`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2604\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2604","id":938602237,"node_id":"MDU6SXNzdWU5Mzg2MDIyMzc=","number":2604,"title":"Add option to delete temporary files (e.g. 
extracted files) when loading dataset","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/dat
asets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":14,"created_at":"2021-07-07T07:56:16Z","updated_at":"2021-07-19T09:08:18Z","closed_at":"2021-07-19T09:08:18Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"I'm loading a dataset constituted of 44 GB of compressed JSON files.\r\n\r\nWhen loading the dataset with the JSON script, extracting the files create about 200 GB of uncompressed files before creating the 180GB of arrow cache tables\r\n\r\nHaving a simple way to delete the extracted files after usage (or even better, to stream extraction\/delete) would be nice to avoid disk cluter.\r\n\r\nI can maybe tackle this one in the JSON script unless you want a more general solution.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2603\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603","id":938588149,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0OTQ0ODcz","number":2603,"title":"Fix DuplicatedKeysError in 
omp","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T07:38:32Z","updated_at":"2021-07-12T14:10:41Z","closed_at":"2021-07-07T12:56:35Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2603","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2603.patch"},"body":"Close #2598.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2602\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602","id":938555712,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0OTE5MjMy","number":2602,"title":"Remove import of transformers","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-07T06:58:18Z","updated_at":"2021-07-12T14:10:22Z","closed_at":"2021-07-07T08:28:51Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2602","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2602.patch"},"body":"When pickling a tokenizer within multiprocessing, check that is instance of transformers PreTrainedTokenizerBase without importing transformers.\r\n\r\nRelated to huggingface\/transformers#12549 and #502.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2601\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601","id":938096396,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0NTQyNjY5","number":2601,"title":"Fix `filter` with multiprocessing in case all samples are 
discarded","user":{"login":"mxschmdt","id":4904985,"node_id":"MDQ6VXNlcjQ5MDQ5ODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4904985?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mxschmdt","html_url":"https:\/\/github.com\/mxschmdt","followers_url":"https:\/\/api.github.com\/users\/mxschmdt\/followers","following_url":"https:\/\/api.github.com\/users\/mxschmdt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mxschmdt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mxschmdt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mxschmdt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mxschmdt\/orgs","repos_url":"https:\/\/api.github.com\/users\/mxschmdt\/repos","events_url":"https:\/\/api.github.com\/users\/mxschmdt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mxschmdt\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-06T17:06:28Z","updated_at":"2021-07-12T14:10:35Z","closed_at":"2021-07-07T12:50:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2601","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2601.patch"},"body":"Fixes #2600 \r\n\r\nAlso I moved the check for `num_proc` larger than dataset size added in #2566 up so that multiprocessing is not used with one process.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2600\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2600","id":938086745,"node_id":"MDU6SXNzdWU5MzgwODY3NDU=","number":2600,"title":"Crash when using multiprocessing (`num_proc` > 1) on `filter` and all samples are discarded","user":{"login":"mxschmdt","id":4904985,"node_id":"MDQ6VXNlcjQ5MDQ5ODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4904985?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mxschmdt","html_url":"https:\/\/github.com\/mxschmdt","followers_url":"https:\/\/api.github.com\/users\/mxschmdt\/followers","following_url":"https:\/\/api.github.com\/users\/mxschmdt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mxschmdt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mxschmdt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mxschmdt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mxschmdt\/orgs","repos_url":"https:\/\/api.github.com\/users\/mxschmdt\/repos","events_url":"https:\/\/api.github.com\/users\/mxschmdt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mxschmdt\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-06T16:53:25Z","updated_at":"2021-07-07T12:50:31Z","closed_at":"2021-07-07T12:50:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nIf `filter` is applied to a dataset using multiprocessing (`num_proc` > 1) and all sharded datasets are empty afterwards (due to all samples being discarded), the program crashes.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import Dataset\r\ndata = Dataset.from_dict({'id': [0,1]})\r\ndata.filter(lambda x: False, num_proc=2)\r\n```\r\n\r\n## Expected results\r\nAn empty table should be returned without crashing.\r\n\r\n## Actual results\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 185, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/fingerprint.py\", line 397, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 2143, in filter\r\n return self.map(\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1738, in map\r\n result = concatenate_datasets(transformed_shards)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 3267, in concatenate_datasets\r\n table = concat_tables(tables_to_concat, axis=axis)\r\n File 
\"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/table.py\", line 853, in concat_tables\r\n return ConcatenationTable.from_tables(tables, axis=axis)\r\n File \"\/home\/user\/venv\/lib\/python3.8\/site-packages\/datasets\/table.py\", line 713, in from_tables\r\n blocks = to_blocks(tables[0])\r\nIndexError: list index out of range\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.9.0\r\n- Platform: Linux-5.12.11-300.fc34.x86_64-x86_64-with-glibc2.2.5\r\n- Python version: 3.8.10\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2599\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599","id":937980229,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0NDQ2MTYx","number":2599,"title":"Update processing.rst with other export formats","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-06T14:50:38Z","updated_at":"2021-07-12T14:10:16Z","closed_at":"2021-07-07T08:05:48Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2599","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2599.patch"},"body":"Add other supported export formats than CSV in the docs.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2598\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2598","id":937930632,"node_id":"MDU6SXNzdWU5Mzc5MzA2MzI=","number":2598,"title":"Unable to download omp 
dataset","user":{"login":"erikadistefano","id":25797960,"node_id":"MDQ6VXNlcjI1Nzk3OTYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25797960?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/erikadistefano","html_url":"https:\/\/github.com\/erikadistefano","followers_url":"https:\/\/api.github.com\/users\/erikadistefano\/followers","following_url":"https:\/\/api.github.com\/users\/erikadistefano\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/erikadistefano\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/erikadistefano\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/erikadistefano\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/erikadistefano\/orgs","repos_url":"https:\/\/api.github.com\/users\/erikadistefano\/repos","events_url":"https:\/\/api.github.com\/users\/erikadistefano\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/erikadistefano\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-07-06T14:00:52Z","updated_at":"2021-07-07T12:56:35Z","closed_at
":"2021-07-07T12:56:35Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe omp dataset cannot be downloaded because of a DuplicatedKeysError\r\n\r\n## Steps to reproduce the bug\r\nfrom datasets import load_dataset\r\nomp = load_dataset('omp', 'posts_labeled')\r\nprint(omp)\r\n\r\n## Expected results\r\nThis code should download the omp dataset and print the dictionary\r\n\r\n## Actual results\r\nDownloading and preparing dataset omp\/posts_labeled (download: 1.27 MiB, generated: 13.31 MiB, post-processed: Unknown size, total: 14.58 MiB) to \/home\/erika_distefano\/.cache\/huggingface\/datasets\/omp\/posts_labeled\/1.1.0\/2fe5b067be3bff1d4588d5b0cbb9b5b22ae1b9d5b026a8ff572cd389f862735b...\r\n0 examples [00:00, ? examples\/s]2021-07-06 09:43:55.868815: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library libcudart.so.11.0\r\nTraceback (most recent call last): \r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 990, in _prepare_split\r\n writer.write(example, key)\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 338, in write\r\n self.check_duplicate_keys()\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 349, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 3326\r\nKeys should be unique and deterministic in nature\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"hf_datasets.py\", line 32, in \r\n omp = load_dataset('omp', 'posts_labeled')\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py\", line 992, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 409, in finalize\r\n self.check_duplicate_keys()\r\n File \"\/home\/erika_distefano\/.local\/lib\/python3.6\/site-packages\/datasets\/arrow_writer.py\", line 349, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 3326\r\nKeys should be unique and deterministic in nature\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Ubuntu 18.04.4 LTS\r\n- Python version: 3.6.9\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2597\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597","id":937917770,"node_id":"MDExOlB1bGxSZXF1ZXN0Njg0Mzk0MDIz","number":2597,"title":"Remove redundant prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-06T13:47:45Z","updated_at":"2021-07-12T14:10:52Z","closed_at":"2021-07-07T13:01:46Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2597","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2597.patch"},"body":"I have noticed that after implementing `load_dataset_builder` (#2500), there is a redundant call to `prepare_module`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2596\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2596","id":937598914,"node_id":"MDU6SXNzdWU5Mzc1OTg5MTQ=","number":2596,"title":"Transformer Class on 
dataset","user":{"login":"arita37","id":18707623,"node_id":"MDQ6VXNlcjE4NzA3NjIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18707623?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arita37","html_url":"https:\/\/github.com\/arita37","followers_url":"https:\/\/api.github.com\/users\/arita37\/followers","following_url":"https:\/\/api.github.com\/users\/arita37\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arita37\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arita37\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arita37\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arita37\/orgs","repos_url":"https:\/\/api.github.com\/users\/arita37\/repos","events_url":"https:\/\/api.github.com\/users\/arita37\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arita37\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-07-06T07:27:15Z","updated_at":"2021-07-08T08:22:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Just wondering if you have intenttion to create\r\n\r\nTransformerClass :\r\n dataset --> dataset\r\n\r\nand make determnistic transformation (ie not fit).\r\n\r\n\r\n\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2595\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2595","id":937483120,"node_id":"MDU6SXNzdWU5Mzc0ODMxMjA=","number":2595,"title":"ModuleNotFoundError: No module named 'datasets.tasks' while importing common voice 
datasets","user":{"login":"profsatwinder","id":41314912,"node_id":"MDQ6VXNlcjQxMzE0OTEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/41314912?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/profsatwinder","html_url":"https:\/\/github.com\/profsatwinder","followers_url":"https:\/\/api.github.com\/users\/profsatwinder\/followers","following_url":"https:\/\/api.github.com\/users\/profsatwinder\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/profsatwinder\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/profsatwinder\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/profsatwinder\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/profsatwinder\/orgs","repos_url":"https:\/\/api.github.com\/users\/profsatwinder\/repos","events_url":"https:\/\/api.github.com\/users\/profsatwinder\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/profsatwinder\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-06T03:20:55Z","updated_at":"2021-07-06T05:59:49Z","closed_at":"2021-07-06T05:59:49Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Error traceback:\r\n---------------------------------------------------------------------------\r\nModuleNotFoundError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset, load_metric\r\n 2 \r\n----> 3 common_voice_train = load_dataset(\"common_voice\", \"pa-IN\", split=\"train+validation\")\r\n 4 common_voice_test = load_dataset(\"common_voice\", \"pa-IN\", split=\"test\")\r\n\r\n9 frames\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/common_voice\/078d412587e9efeb0ae2e574da99c31e18844c496008d53dc5c60f4159ed639b\/common_voice.py in ()\r\n 19 \r\n 20 import datasets\r\n---> 21 from datasets.tasks import AutomaticSpeechRecognition\r\n 22 \r\n 23 \r\n\r\nModuleNotFoundError: No module named 'datasets.tasks'","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2594\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594","id":937294772,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzODc0NjIz","number":2594,"title":"Fix BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T18:24:10Z","updated_at":"2021-07-06T04:59:38Z","closed_at":"2021-07-06T04:59:38Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2594","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2594.patch"},"body":"Fix BibTeX entry.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2593\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593","id":937242137,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzODMwMjcy","number":2593,"title":"Support pandas 1.3.0 
read_csv","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T16:40:04Z","updated_at":"2021-07-05T17:14:14Z","closed_at":"2021-07-05T17:14:14Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2593","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2593.patch"},"body":"Workaround for this issue in pandas 1.3.0 : https:\/\/github.com\/pandas-dev\/pandas\/issues\/42387\r\n\r\nThe csv reader raises an error:\r\n```python\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/pandas\/io\/parsers\/readers.py in _refine_defaults_read(dialect, delimiter, delim_whitespace, engine, sep, error_bad_lines, warn_bad_lines, on_bad_lines, names, prefix, defaults)\r\n 1304 \r\n 1305 if names is not lib.no_default and prefix is not lib.no_default:\r\n-> 1306 raise ValueError(\"Specified named and prefix; you can only specify one.\")\r\n 1307 \r\n 1308 kwds[\"names\"] = None if names is lib.no_default else names\r\n\r\nValueError: Specified named and prefix; you can only specify one.\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2592\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592","id":937060559,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNjc2MjA4","number":2592,"title":"Add c4.noclean 
infos","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T12:51:40Z","updated_at":"2021-07-05T13:15:53Z","closed_at":"2021-07-05T13:15:52Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2592","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2592.patch"},"body":"Adding the data files checksums and the dataset size of the c4.noclean configuration of the C4 dataset","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2591\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2591","id":936957975,"node_id":"MDU6SXNzdWU5MzY5NTc5NzU=","number":2591,"title":"Cached dataset overflowing disk 
space","user":{"login":"BirgerMoell","id":1704131,"node_id":"MDQ6VXNlcjE3MDQxMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1704131?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BirgerMoell","html_url":"https:\/\/github.com\/BirgerMoell","followers_url":"https:\/\/api.github.com\/users\/BirgerMoell\/followers","following_url":"https:\/\/api.github.com\/users\/BirgerMoell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BirgerMoell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BirgerMoell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BirgerMoell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BirgerMoell\/orgs","repos_url":"https:\/\/api.github.com\/users\/BirgerMoell\/repos","events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-07-05T10:43:19Z","updated_at":"2021-07-19T09:08:19Z","closed_at":"2021-07-19T09:08:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I'm training a Swedish Wav2vec2 model on a Linux GPU and having issues that the huggingface cached dataset folder is completely filling up my disk space (I'm training on a dataset of around 500 gb).\r\n\r\nThe cache folder is 500gb (and now my disk space is full).\r\n\r\nIs there a way to toggle caching or set the caching to be stored on a different device (I have another drive with 4 tb that could hold the caching files).\r\n\r\nThis might not technically be a bug, but I was unsure and I felt that the bug was the closest one.\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 121, in worker\r\n result = (True, func(*args, **kwds))\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 186, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 397, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1983, in _map_single\r\n writer.finalize()\r\n File \"\/home\/birger\/miniconda3\/envs\/wav2vec2\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 418, in finalize\r\n self.pa_writer.close()\r\n File \"pyarrow\/ipc.pxi\", line 402, in pyarrow.lib._CRecordBatchWriter.close\r\n File \"pyarrow\/error.pxi\", line 97, in pyarrow.lib.check_status\r\nOSError: [Errno 28] Error writing bytes to file. 
Detail: [errno 28] No space left on device\r\n\"\"\"\r\n\r\nThe above exception was the direct cause of the following exception:\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2590\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590","id":936954348,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNTg1MDg2","number":2590,"title":"Add language tags","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T10:39:57Z","updated_at":"2021-07-05T10:58:48Z","closed_at":"2021-07-05T10:58:48Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2590","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2590.patch"},"body":"This PR adds some missing language tags needed for ASR datasets in #2565 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2589\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589","id":936825060,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDc0OTQ0","number":2589,"title":"Support multilabel 
metrics","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":2,"created_at":"2021-07-05T08:19:25Z","updated_at":"2021-07-12T14:12:10Z","closed_at":"2021-07-08T08:40:15Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2589","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2589.patch"},"body":"Currently, multilabel metrics are not supported because `predictions` and `references` are defined as `Value(\"int32\")`.\r\n\r\nThis PR creates a new feature type `OptionalSequence` which can act as either `Value(\"int32\")` or `Sequence(Value(\"int32\"))`, depending on the data 
passed.\r\n\r\n\r\nClose #2554.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2588\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588","id":936795541,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDQ5Njky","number":2588,"title":"Fix test_is_small_dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-05T07:46:26Z","updated_at":"2021-07-12T14:10:11Z","closed_at":"2021-07-06T17:09:30Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2588","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2588.patch"},"body":"Remove environment variable fixture `env_max_in_memory_dataset_size`. 
This fixture does not work because env variable is read in datasets.config when first loading datasets, and it is never reread during tests.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2587\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587","id":936771339,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDI5NjQy","number":2587,"title":"Add aiohttp to tests extras require","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-05T07:14:01Z","updated_at":"2021-07-05T09:04:38Z","closed_at":"2021-07-05T09:04:38Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2587","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2587.patch"},"body":"Currently, none of the streaming tests are runned within our CI test suite, because the streaming tests require aiohttp and this is missing from our tests extras require dependencies.\r\n\r\nOur CI test suite should be exhaustive and test all the library functionalities.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2586\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586","id":936747588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgzNDEwMDU3","number":2586,"title":"Fix misalignment in 
SQuAD","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-05T06:42:20Z","updated_at":"2021-07-12T14:11:10Z","closed_at":"2021-07-07T13:18:51Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2586","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2586.patch"},"body":"Fix misalignment between:\r\n- the answer text and\r\n- the answer_start within the context\r\n\r\nby keeping original leading blank spaces in the context.\r\n\r\nFix #2585.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2585\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2585","id":936484419,"node_id":"MDU6SXNzdWU5MzY0ODQ0MTk=","number":2585,"title":"sqaud_v2 dataset contains misalignment between the answer text and the context value at the answer index","user":{"login":"mmajurski","id":9354454,"node_id":"MDQ6VXNlcjkzNTQ0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9354454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mmajurski","html_url":"https:\/\/github.com\/mmajurski","followers_url":"https:\/\/api.github.com\/users\/mmajurski\/followers","following_url":"https:\/\/api.github.com\/users\/mmajurski\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mmajurski\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mmajurski\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mmajurski\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mmajurski\/orgs","repos_url":"https:\/\/api.github.com\/users\/mmajurski\/repos","events_url":"https:\/\/api.github.com\/users\/mmajurski\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mmajurski\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-07-04T15:39:49Z","updated_at":"2021-07-07T13:18:51Z","closed_at":"2021-07-07T13:18:51Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe built in huggingface squad_v2 dataset that you can access via datasets.load_dataset contains mis-alignment between the answers['text'] and the characters in the context at the location specified by answers['answer_start'].\r\n\r\nFor example:\r\nid = '56d1f453e7d4791d009025bd'\r\nanswers = {'text': ['Pure Land'], 'answer_start': [146]}\r\nHowever the actual text in context at location 146 is 'ure Land,'\r\nWhich is an off-by-one error from the correct answer.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\n\r\ndef check_context_answer_alignment(example):\r\n for a_idx in range(len(example['answers']['text'])):\r\n # check raw dataset for answer consistency between context and answer\r\n answer_text = example['answers']['text'][a_idx]\r\n a_st_idx = example['answers']['answer_start'][a_idx]\r\n a_end_idx = a_st_idx + len(example['answers']['text'][a_idx])\r\n answer_text_from_context = example['context'][a_st_idx:a_end_idx]\r\n if answer_text != answer_text_from_context:\r\n #print(example['id'])\r\n return False\r\n return True\r\n\r\ndataset = datasets.load_dataset('squad_v2', split='train', 
keep_in_memory=True)\r\n\r\nstart_len = len(dataset)\r\ndataset = dataset.filter(check_context_answer_alignment,\r\n num_proc=1,\r\n keep_in_memory=True)\r\nend_len = len(dataset)\r\nprint('{} instances contain mis-alignment between the answer text and answer index.'.format(start_len - end_len))\r\n```\r\n\r\n## Expected results\r\nThis code should result in 0 rows being filtered out from the dataset.\r\n\r\n## Actual results\r\nThis filter command results in 258 rows being flagged as containing a discrepancy between the text contained within answers['text'] and the text in example['context'] at the answers['answer_start'] location.\r\n\r\nThis code will reproduce the problem and produce the following count:\r\n\"258 instances contain mis-alignment between the answer text and answer index.\"\r\n\r\n## Environment info\r\nSteps to rebuilt the Conda environment:\r\n```\r\n# create a virtual environment to stuff all these packages into\r\nconda create -n round8 python=3.8 -y\r\n\r\n# activate the virtual environment\r\nconda activate round8\r\n\r\n# install pytorch (best done through conda to handle cuda dependencies)\r\nconda install pytorch torchvision torchtext cudatoolkit=11.1 -c pytorch-lts -c nvidia\r\n\r\npip install jsonpickle transformers datasets matplotlib\r\n```\r\n\r\nOS: Ubuntu 20.04\r\nPython 3.8\r\n\r\nResult of `conda env export`:\r\n```\r\nname: round8\r\nchannels:\r\n - pytorch-lts\r\n - nvidia\r\n - defaults\r\ndependencies:\r\n - _libgcc_mutex=0.1=main\r\n - _openmp_mutex=4.5=1_gnu\r\n - blas=1.0=mkl\r\n - brotlipy=0.7.0=py38h27cfd23_1003\r\n - bzip2=1.0.8=h7b6447c_0\r\n - ca-certificates=2021.5.25=h06a4308_1\r\n - certifi=2021.5.30=py38h06a4308_0\r\n - cffi=1.14.5=py38h261ae71_0\r\n - chardet=4.0.0=py38h06a4308_1003\r\n - cryptography=3.4.7=py38hd23ed53_0\r\n - cudatoolkit=11.1.74=h6bb024c_0\r\n - ffmpeg=4.2.2=h20bf706_0\r\n - freetype=2.10.4=h5ab3b9f_0\r\n - gmp=6.2.1=h2531618_2\r\n - gnutls=3.6.15=he1e5248_0\r\n - idna=2.10=pyhd3eb1b0_0\r\n - intel-openmp=2021.2.0=h06a4308_610\r\n - jpeg=9b=h024ee3a_2\r\n - lame=3.100=h7b6447c_0\r\n - lcms2=2.12=h3be6417_0\r\n - ld_impl_linux-64=2.35.1=h7274673_9\r\n - libffi=3.3=he6710b0_2\r\n - libgcc-ng=9.3.0=h5101ec6_17\r\n - libgomp=9.3.0=h5101ec6_17\r\n - libidn2=2.3.1=h27cfd23_0\r\n - libopus=1.3.1=h7b6447c_0\r\n - libpng=1.6.37=hbc83047_0\r\n - libstdcxx-ng=9.3.0=hd4cf53a_17\r\n - libtasn1=4.16.0=h27cfd23_0\r\n - libtiff=4.2.0=h85742a9_0\r\n - libunistring=0.9.10=h27cfd23_0\r\n - libuv=1.40.0=h7b6447c_0\r\n - libvpx=1.7.0=h439df22_0\r\n - libwebp-base=1.2.0=h27cfd23_0\r\n - lz4-c=1.9.3=h2531618_0\r\n - mkl=2021.2.0=h06a4308_296\r\n - mkl-service=2.3.0=py38h27cfd23_1\r\n - mkl_fft=1.3.0=py38h42c9631_2\r\n - mkl_random=1.2.1=py38ha9443f7_2\r\n - ncurses=6.2=he6710b0_1\r\n - nettle=3.7.3=hbbd107a_1\r\n - ninja=1.10.2=hff7bd54_1\r\n - numpy=1.20.2=py38h2d18471_0\r\n - numpy-base=1.20.2=py38hfae3a4d_0\r\n - olefile=0.46=py_0\r\n - openh264=2.1.0=hd408876_0\r\n - openssl=1.1.1k=h27cfd23_0\r\n - pillow=8.2.0=py38he98fc37_0\r\n - pip=21.1.2=py38h06a4308_0\r\n - pycparser=2.20=py_2\r\n - pyopenssl=20.0.1=pyhd3eb1b0_1\r\n - pysocks=1.7.1=py38h06a4308_0\r\n - python=3.8.10=h12debd9_8\r\n - pytorch=1.8.1=py3.8_cuda11.1_cudnn8.0.5_0\r\n - readline=8.1=h27cfd23_0\r\n - requests=2.25.1=pyhd3eb1b0_0\r\n - setuptools=52.0.0=py38h06a4308_0\r\n - six=1.16.0=pyhd3eb1b0_0\r\n - sqlite=3.35.4=hdfb4753_0\r\n - tk=8.6.10=hbc83047_0\r\n - torchtext=0.9.1=py38\r\n - torchvision=0.9.1=py38_cu111\r\n - typing_extensions=3.7.4.3=pyha847dfd_0\r\n - 
urllib3=1.26.4=pyhd3eb1b0_0\r\n - wheel=0.36.2=pyhd3eb1b0_0\r\n - x264=1!157.20191217=h7b6447c_0\r\n - xz=5.2.5=h7b6447c_0\r\n - zlib=1.2.11=h7b6447c_3\r\n - zstd=1.4.9=haebb681_0\r\n - pip:\r\n - click==8.0.1\r\n - cycler==0.10.0\r\n - datasets==1.8.0\r\n - dill==0.3.4\r\n - filelock==3.0.12\r\n - fsspec==2021.6.0\r\n - huggingface-hub==0.0.8\r\n - joblib==1.0.1\r\n - jsonpickle==2.0.0\r\n - kiwisolver==1.3.1\r\n - matplotlib==3.4.2\r\n - multiprocess==0.70.12.2\r\n - packaging==20.9\r\n - pandas==1.2.4\r\n - pyarrow==3.0.0\r\n - pyparsing==2.4.7\r\n - python-dateutil==2.8.1\r\n - pytz==2021.1\r\n - regex==2021.4.4\r\n - sacremoses==0.0.45\r\n - tokenizers==0.10.3\r\n - tqdm==4.49.0\r\n - transformers==4.6.1\r\n - xxhash==2.0.2\r\nprefix: \/home\/mmajurski\/anaconda3\/envs\/round8\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2584\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584","id":936049736,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyODY2Njc1","number":2584,"title":"wi_locness: reference latest leaderboard on codalab","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-02T20:26:22Z","updated_at":"2021-07-05T09:06:14Z","closed_at":"2021-07-05T09:06:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2584","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2584.patch"},"body":"The dataset's author asked me to put this codalab link into the dataset's README.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2583\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2583","id":936034976,"node_id":"MDU6SXNzdWU5MzYwMzQ5NzY=","number":2583,"title":"Error iteration over IterableDataset using Torch DataLoader","user":{"login":"LeenaShekhar","id":12227436,"node_id":"MDQ6VXNlcjEyMjI3NDM2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12227436?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LeenaShekhar","html_url":"https:\/\/github.com\/LeenaShekhar","followers_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/followers","following_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/orgs","repos_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/repos","events_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LeenaShekhar\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-07-02T19:55:58Z","updated_at":"2021-07-20T09:04:45Z","closed_at":"2021-07-05T23:48:23Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI have an IterableDataset (created using streaming=True) and I am trying to create batches using Torch DataLoader class by passing this IterableDataset to it. This throws error which is pasted below. I can do the same by using Torch IterableDataset. One thing I noticed is that in the former case when I look at the dataloader.sampler class I get torch.utils.data.sampler.SequentialSampler while the latter one gives torch.utils.data.dataloader._InfiniteConstantSampler. \r\n\r\nI am not sure if this is how it is meant to be used, but that's what seemed reasonable to me. \r\n\r\n## Steps to reproduce the bug\r\n\r\n1. Does not work.\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> dataset = load_dataset('oscar', \"unshuffled_deduplicated_en\", split='train', streaming=True)\r\n>>> dataloader = torch.utils.data.DataLoader(dataset, batch_size=4)\r\n>>> dataloader.sampler\r\n\r\n>>> for batch in dataloader:\r\n... print(batch)\r\n```\r\n\r\n2. 
Works.\r\n```python\r\nimport torch\r\nfrom torch.utils.data import Dataset, IterableDataset, DataLoader\r\nclass CustomIterableDataset(IterableDataset):\r\n 'Characterizes a dataset for PyTorch'\r\n def __init__(self, data):\r\n 'Initialization'\r\n self.data = data\r\n\r\n\r\n def __iter__(self):\r\n return iter(self.data)\r\n\r\n\r\ndata = list(range(12))\r\ndataset = CustomIterableDataset(data)\r\ndataloader = DataLoader(dataset, batch_size=4)\r\nprint(\"dataloader: \", dataloader.sampler)\r\nfor batch in dataloader:\r\n print(batch)\r\n```\r\n\r\n## Expected results\r\nTo get batches of data with the batch size as 4. Output from the latter one (2) though Datasource is different here so actual data is different.\r\ndataloader: \r\ntensor([0, 1, 2, 3])\r\ntensor([4, 5, 6, 7])\r\ntensor([ 8, 9, 10, 11])\r\n\r\n## Actual results\r\n\r\n\r\n...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/dataloader.py\", line 435, in __next__\r\n data = self._next_data()\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/dataloader.py\", line 474, in _next_data\r\n index = self._next_index() # may raise StopIteration\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/dataloader.py\", line 427, in _next_index\r\n return next(self._sampler_iter) # may raise StopIteration\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/sampler.py\", line 227, in __iter__\r\n for idx in self.sampler:\r\n File \"\/data\/leshekha\/lib\/HFDatasets\/lib\/python3.6\/site-packages\/torch\/utils\/data\/sampler.py\", line 67, in __iter__\r\n return iter(range(len(self.data_source)))\r\nTypeError: object of type 'IterableDataset' has no len()\r\n\r\n## Environment info\r\n\r\n- `datasets` version: '1.8.1.dev0'\r\n- Platform: Linux\r\n- Python version: Python 3.6.8\r\n- PyArrow version: '3.0.0'\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2582\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582","id":935859104,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyNzAzNzg3","number":2582,"title":"Add skip and 
take","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-07-02T15:10:19Z","updated_at":"2021-07-05T16:06:40Z","closed_at":"2021-07-05T16:06:39Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2582","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2582.patch"},"body":"As discussed in https:\/\/github.com\/huggingface\/datasets\/pull\/2375#discussion_r657084544 I added the `IterableDataset.skip` and `IterableDataset.take` methods that allows to do basic splitting of iterable datasets.\r\n\r\nYou can create new dataset with the first `n` examples using `IterableDataset.take()`, or you can get a dataset with the rest of the examples by skipping the first `n` examples with `IterableDataset.skip()`\r\n\r\nOne implementation detail:\r\n\r\nUsing `take` (or `skip`) prevents future dataset shuffling from shuffling the dataset shards, otherwise the taken examples could come from other shards. In this case it only uses the shuffle buffer.\r\nI would have loved to allow the shards of the taken examples to be shuffled anyway, but since we don't know in advance the length of each shard we don't know what shards to take or skip.\r\nI think this is ok though since users can shuffle before doing take or skip. 
I mentioned this in the documentation\r\n\r\ncc @vblagoje @lewtun ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2581\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581","id":935783588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyNjQwMDY4","number":2581,"title":"Faster search_batch for ElasticsearchIndex due to threading","user":{"login":"mwrzalik","id":1376337,"node_id":"MDQ6VXNlcjEzNzYzMzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1376337?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mwrzalik","html_url":"https:\/\/github.com\/mwrzalik","followers_url":"https:\/\/api.github.com\/users\/mwrzalik\/followers","following_url":"https:\/\/api.github.com\/users\/mwrzalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mwrzalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mwrzalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mwrzalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mwrzalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/mwrzalik\/repos","events_url":"https:\/\/api.github.com\/users\/mwrzalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mwrzalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":0,"created_at":"2021-07-02T13:42:07Z","updated_at":"2021-07-12T14:13:46Z","closed_at":"2021-07-12T09:52:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2581","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2581.patch"},"body":"Hey, \r\nI think it makes sense to perform search_batch threaded, so ES can perform search in parallel.\r\nCheers!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2580\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580","id":935767421,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyNjI2MTkz","number":2580,"title":"Fix Counter 
import","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-02T13:21:48Z","updated_at":"2021-07-02T14:37:47Z","closed_at":"2021-07-02T14:37:46Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2580","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2580.patch"},"body":"Import from `collections` instead of `typing`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2579\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579","id":935486894,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyMzkyNjYx","number":2579,"title":"Fix BibTeX 
entry","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-02T07:10:40Z","updated_at":"2021-07-02T07:33:44Z","closed_at":"2021-07-02T07:33:44Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2579","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2579.patch"},"body":"Add missing contributor to BibTeX entry.\r\n\r\ncc: @abhishekkrthakur @thomwolf ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2578\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578","id":935187497,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgyMTQ0OTY2","number":2578,"title":"Support Zstandard compressed 
files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-07-01T20:22:34Z","updated_at":"2021-08-11T14:46:24Z","closed_at":"2021-07-05T10:50:27Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2578","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2578.patch"},"body":"Close #2572.\r\n\r\ncc: @thomwolf ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2576\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576","id":934986761,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgxOTc5MTA1","number":2576,"title":"Add 
mC4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-01T15:51:25Z","updated_at":"2021-07-02T14:50:56Z","closed_at":"2021-07-02T14:50:55Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2576","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2576.patch"},"body":"AllenAI is now hosting the processed C4 and mC4 dataset in this repo: https:\/\/huggingface.co\/datasets\/allenai\/c4\r\nThanks a lot to them !\r\n\r\nIn this PR I added the mC4 dataset builder. It supports 108 languages\r\n\r\nYou can load it with\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nen_mc4 = load_dataset(\"mc4\", \"en\")\r\nfr_mc4 = load_dataset(\"mc4\", \"fr\")\r\nen_and_fr_mc4 = load_dataset(\"mc4\", languages=[\"en\", \"fr\"])\r\n```\r\n\r\nIt also supports streaming, if you don't want to download hundreds of GB of data:\r\n```python\r\nen_mc4 = load_dataset(\"mc4\", \"en\", streaming=True)\r\n```\r\n\r\nRegarding the dataset_infos.json, I will add them once I have them.\r\n\r\nAlso we can work on the dataset card at that will be at https:\/\/huggingface.co\/datasets\/mc4\r\nFor now I just added a link to https:\/\/huggingface.co\/datasets\/allenai\/c4 as well as a few sections","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2575\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575","id":934876496,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgxODg0OTgy","number":2575,"title":"Add 
C4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-01T13:58:08Z","updated_at":"2021-07-02T14:50:23Z","closed_at":"2021-07-02T14:50:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2575","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2575.patch"},"body":"The old code for the C4 dataset was to generate the C4 with Apache Beam, as in Tensorflow Datasets.\r\nHowever AllenAI is now hosting the processed C4 dataset in this repo: https:\/\/huggingface.co\/datasets\/allenai\/c4\r\nThanks a lot to them for their amazing work !\r\n\r\nIn this PR I changed the script to download and prepare the data directly from this repo.\r\nIt has 4 variants: en, en.noblocklist, en.noclean, realnewslike\r\n\r\nYou can load it with\r\n```python\r\nfrom datasets import load_dataset\r\n\r\nc4 = load_dataset(\"c4\", \"en\")\r\n```\r\n\r\nIt also supports streaming, if you don't want to download hundreds of GB of data:\r\n```python\r\nc4 = load_dataset(\"c4\", \"en\", streaming=True)\r\n```\r\n\r\nRegarding the dataset_infos.json, I haven't added the infos for en.noclean. 
I will add them once I have them.\r\n\r\nAlso we can work on the dataset card at https:\/\/huggingface.co\/datasets\/c4\r\nFor now I just added a link to https:\/\/huggingface.co\/datasets\/allenai\/c4 as well as a few sections","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2574\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574","id":934632378,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgxNjczMzYy","number":2574,"title":"Add streaming in load a dataset docs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-07-01T09:32:53Z","updated_at":"2021-07-01T14:12:22Z","closed_at":"2021-07-01T14:12:21Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2574","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2574.patch"},"body":"Mention dataset streaming on the \"loading a dataset\" page of the documentation","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2573\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2573","id":934584745,"node_id":"MDU6SXNzdWU5MzQ1ODQ3NDU=","number":2573,"title":"Finding right block-size with JSON loading difficult for 
user","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-07-01T08:48:35Z","updated_at":"2021-07-01T19:10:53Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As reported by @thomwolf, while loading a JSON Lines file with \"json\" loading script, he gets\r\n> json.decoder.JSONDecodeError: Extra data: line 2 column 1 (char 383)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2572\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2572","id":934573767,"node_id":"MDU6SXNzdWU5MzQ1NzM3Njc=","number":2572,"title":"Support Zstandard compressed 
files","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-07-01T08:37:04Z","updated_at":"2021-07-0
5T10:50:27Z","closed_at":"2021-07-05T10:50:27Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Add support for Zstandard compressed files: https:\/\/facebook.github.io\/zstd\/","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2571\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571","id":933791018,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwOTQ2NzQ1","number":2571,"title":"Filter expected warning log from transformers","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-30T14:48:19Z","updated_at":"2021-07-02T04:08:17Z","closed_at":"2021-07-02T04:08:17Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2571","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2571.patch"},"body":"Close #2569.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2570\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570","id":933402521,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwNjEzNzc0","number":2570,"title":"Minor fix docs format for 
bertscore","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-30T07:42:12Z","updated_at":"2021-06-30T15:31:01Z","closed_at":"2021-06-30T15:31:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2570","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2570.patch"},"body":"Minor fix docs format for bertscore:\r\n- link to README\r\n- format of KWARGS_DESCRIPTION","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2569\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2569","id":933015797,"node_id":"MDU6SXNzdWU5MzMwMTU3OTc=","number":2569,"title":"Weights of model checkpoint not initialized for RobertaModel for 
Bertscore","user":{"login":"suzyahyah","id":2980993,"node_id":"MDQ6VXNlcjI5ODA5OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2980993?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/suzyahyah","html_url":"https:\/\/github.com\/suzyahyah","followers_url":"https:\/\/api.github.com\/users\/suzyahyah\/followers","following_url":"https:\/\/api.github.com\/users\/suzyahyah\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/suzyahyah\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/suzyahyah\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/suzyahyah\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/suzyahyah\/orgs","repos_url":"https:\/\/api.github.com\/users\/suzyahyah\/repos","events_url":"https:\/\/api.github.com\/users\/suzyahyah\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/suzyahyah\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-06-29T18:55:23Z","updated_at":"2021-07-01T07:08:59Z","closed_at":"2021-06-30T07:35:49Z","author_association":"NONE","active
_lock_reason":null,"pull_request":null,"body":"When applying bertscore out of the box, \r\n\r\n```Some weights of the model checkpoint at roberta-large were not used when initializing RobertaModel: ['lm_head.decoder.weight', 'lm_head.bias', 'lm_head.dense.bias', 'lm_head.layer_norm.bias', 'lm_head.dense.weight', 'lm_head.layer_norm.weight']```\r\n\r\nFollowing the typical usage from https:\/\/huggingface.co\/docs\/datasets\/loading_metrics.html\r\n\r\n```\r\nfrom datasets import load_metric\r\nmetric = load_metric('bertscore')\r\n\r\n# Example of typical usage\r\nfor batch in dataset:\r\n inputs, references = batch\r\n predictions = model(inputs)\r\n metric.add_batch(predictions=predictions, references=references)\r\nscore = metric.compute(lang=\"en\")\r\n#score = metric.compute(model_type=\"roberta-large\") # gives the same error\r\n```\r\n\r\nI am concerned about this because my usage shouldn't require any further fine-tuning and most people would expect to use BertScore out of the box? I realised the huggingface code is a wrapper around https:\/\/github.com\/Tiiiger\/bert_score, but I think this repo is anyway relying on the model code and weights from huggingface repo.... \r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.0-1041-aws-x86_64-with-glibc2.27\r\n- Python version: 3.9.5\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2568\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568","id":932934795,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwMjE5MDU2","number":2568,"title":"Add interleave_datasets for map-style 
datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T17:19:24Z","updated_at":"2021-07-01T09:33:34Z","closed_at":"2021-07-01T09:33:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2568","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2568.patch"},"body":"### Add interleave_datasets for map-style datasets\r\n\r\nAdd support for map-style datasets (i.e. `Dataset` objects) in `interleave_datasets`.\r\nIt was only supporting iterable datasets (i.e. `IterableDataset` objects).\r\n\r\n### Implementation details\r\n\r\nIt works by concatenating the datasets and then re-order the indices to make the new dataset.\r\n\r\n### TODO\r\n- [x] tests\r\n- [x] docs\r\n\r\nClose #2563 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2567\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567","id":932933536,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwMjE3OTY3","number":2567,"title":"Add ASR task and new languages to 
resources","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T17:18:01Z","updated_at":"2021-07-01T09:42:23Z","closed_at":"2021-07-01T09:42:09Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2567","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2567.patch"},"body":"This PR adds a new `automatic-speech-recognition` task to the list of supported tasks in `tasks.json` and also includes a few new languages missing from `common_voice`.\r\n\r\nNote: I used the [Papers with Code list](https:\/\/www.paperswithcode.com\/area\/speech\/speech-recognition) as inspiration for the ASR subtasks","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2566\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566","id":932804725,"node_id":"MDExOlB1bGxSZXF1ZXN0NjgwMTA2NzM0","number":2566,"title":"fix Dataset.map when num_procs > num 
rows","user":{"login":"connor-mccarthy","id":55268212,"node_id":"MDQ6VXNlcjU1MjY4MjEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55268212?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/connor-mccarthy","html_url":"https:\/\/github.com\/connor-mccarthy","followers_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/followers","following_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/orgs","repos_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/repos","events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T15:07:07Z","updated_at":"2021-07-01T09:11:13Z","closed_at":"2021-07-01T09:11:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2566","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2566.patch"},"body":"closes #2470\r\n\r\n## Testing notes\r\nTo run updated tests:\r\n```sh\r\npytest tests\/test_arrow_dataset.py -k \"BaseDatasetTest and test_map_multiprocessing\" -s\r\n```\r\nWith Python code (to view warning):\r\n```python\r\nfrom datasets import Dataset\r\n\r\n\r\ndataset = Dataset.from_dict({\"x\": [\"sample\"]})\r\nprint(len(dataset))\r\ndataset.map(lambda x: x, num_proc=10)\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2565\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565","id":932445439,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5Nzg3NTI4","number":2565,"title":"Inject templates for ASR 
datasets","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-29T10:02:01Z","updated_at":"2021-07-05T14:26:26Z","closed_at":"2021-07-05T14:26:26Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2565","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2565.patch"},"body":"This PR adds ASR templates for 5 of the most common speech datasets on the Hub, where \"common\" is defined by the number of models trained on them.\r\n\r\nI also fixed a bunch of the tags in the READMEs \ud83d\ude0e ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2564\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2564","id":932389639,"node_id":"MDU6SXNzdWU5MzIzODk2Mzk=","number":2564,"title":"concatenate_datasets for iterable 
datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-29T08:59:41Z","updated_at":"2021-06-29T08:59:41Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Currently `concatenate_datasets` only works for map-style `Dataset`.\r\n\r\nIt would be nice to have it work for `IterableDataset` objects as well.\r\n\r\nIt would simply chain the iterables of the iterable datasets.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2563\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2563","id":932387639,"node_id":"MDU6SXNzdWU5MzIzODc2Mzk=","number":2563,"title":"interleave_datasets for map-style datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User
","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-29T08:57:24Z","updated_at":"2021-07-01T09:33:33Z","closed_at":"2021-07-01T09:33:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Currently the `interleave_datasets` functions only works for `IterableDataset`.\r\nLet's make it work for map-style `Dataset` objects as well.\r\n\r\nIt would work the same way: either alternate between the datasets in order or randomly given probabilities specified by the user.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2562\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562","id":932333436,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5NjkyMjQ2","number":2562,"title":"Minor fix in loading metrics docs","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-29T07:55:11Z","updated_at":"2021-06-29T17:21:22Z","closed_at":"2021-06-29T17:21:22Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2562","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2562.patch"},"body":"Make some minor fixes in \"Loading metrics\" docs.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2561\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2561","id":932321725,"node_id":"MDU6SXNzdWU5MzIzMjE3MjU=","number":2561,"title":"Existing cache for local dataset builder file 
updates is ignored with `ignore_verifications=True`","user":{"login":"apsdehal","id":3616806,"node_id":"MDQ6VXNlcjM2MTY4MDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3616806?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apsdehal","html_url":"https:\/\/github.com\/apsdehal","followers_url":"https:\/\/api.github.com\/users\/apsdehal\/followers","following_url":"https:\/\/api.github.com\/users\/apsdehal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apsdehal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apsdehal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apsdehal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apsdehal\/orgs","repos_url":"https:\/\/api.github.com\/users\/apsdehal\/repos","events_url":"https:\/\/api.github.com\/users\/apsdehal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apsdehal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-29T07:43:03Z","updated_at":"2021-06-30T12:55:24Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nIf i have local file defining a dataset builder class and I load it using `load_dataset` functionality, the existing cache is ignored whenever the file is update even with `ignore_verifications=True`. This slows down debugging and cache generator for very large datasets.\r\n\r\n## Steps to reproduce the bug\r\n\r\n- Create a local dataset builder class\r\n- load the local builder class file using `load_dataset` and let the cache build\r\n- update the file's content\r\n- The cache should rebuilt.\r\n\r\n## Expected results\r\n\r\nWith `ignore_verifications=True`, `load_dataset` should pick up existing cache.\r\n\r\n## Actual results\r\n\r\nCreates new cache.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.4.0-52-generic-x86_64-with-debian-bullseye-sid\r\n- Python version: 3.7.7\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2560\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560","id":932143634,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5NTMyODk4","number":2560,"title":"fix Dataset.map when num_procs > num 
rows","user":{"login":"connor-mccarthy","id":55268212,"node_id":"MDQ6VXNlcjU1MjY4MjEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55268212?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/connor-mccarthy","html_url":"https:\/\/github.com\/connor-mccarthy","followers_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/followers","following_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/orgs","repos_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/repos","events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-29T02:24:11Z","updated_at":"2021-06-29T15:00:18Z","closed_at":"2021-06-29T14:53:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2560","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2560.patch"},"body":"closes #2470\r\n\r\n## Testing notes\r\nTo run updated tests:\r\n```sh\r\npytest tests\/test_arrow_dataset.py -k \"BaseDatasetTest and test_map_multiprocessing\" -s\r\n```\r\nWith Python code (to view warning):\r\n```python\r\nfrom datasets import Dataset\r\n\r\n\r\ndataset = Dataset.from_dict({\"x\": [\"sample\"]})\r\nprint(len(dataset))\r\ndataset.map(lambda x: x, num_proc=10)\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2559\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2559","id":931849724,"node_id":"MDU6SXNzdWU5MzE4NDk3MjQ=","number":2559,"title":"Memory usage consistently increases when processing a dataset with 
`.map`","user":{"login":"apsdehal","id":3616806,"node_id":"MDQ6VXNlcjM2MTY4MDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3616806?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/apsdehal","html_url":"https:\/\/github.com\/apsdehal","followers_url":"https:\/\/api.github.com\/users\/apsdehal\/followers","following_url":"https:\/\/api.github.com\/users\/apsdehal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/apsdehal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/apsdehal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/apsdehal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/apsdehal\/orgs","repos_url":"https:\/\/api.github.com\/users\/apsdehal\/repos","events_url":"https:\/\/api.github.com\/users\/apsdehal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/apsdehal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-28T18:31:58Z","updated_at":"2021-06-29T08:43:00Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI have a HF dataset with image paths stored in it and I am trying to load those image paths using `.map` with `num_proc=80`. I am noticing that the memory usage consistently keeps on increasing with time. I tried using `DEFAULT_WRITER_BATCH_SIZE=10` in the builder to decrease arrow writer's batch size but that doesn't seem to help.\r\n\r\n## Steps to reproduce the bug\r\n\r\nProviding code as it is would be hard. 
I can provide a MVP if that helps.\r\n\r\n## Expected results\r\n\r\nMemory usage should become consistent after some time following the launch of processing.\r\n\r\n## Actual results\r\n\r\nMemory usage keeps on increasing.\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.4.0-52-generic-x86_64-with-debian-bullseye-sid\r\n- Python version: 3.7.7\r\n- PyArrow version: 3.0.0","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2558\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558","id":931736647,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5MTg0Njk1","number":2558,"title":"Update: WebNLG - update checksums","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-28T16:16:37Z","updated_at":"2021-06-28T17:23:17Z","closed_at":"2021-06-28T17:23:16Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2558","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2558.patch"},"body":"The master branch changed so I computed the new checksums.\r\n\r\nI also pinned a specific revision so that it doesn't happen again in the future.\r\n\r\nFix https:\/\/github.com\/huggingface\/datasets\/issues\/2553","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2557\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557","id":931633823,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5MDk4ODg3","number":2557,"title":"Fix `fever` 
keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-28T14:27:02Z","updated_at":"2021-06-28T16:11:30Z","closed_at":"2021-06-28T16:11:29Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2557","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2557.patch"},"body":"The keys has duplicates since they were reset to 0 after each file.\r\n\r\nI fixed it by taking into account the file index as well.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2556\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2556","id":931595872,"node_id":"MDU6SXNzdWU5MzE1OTU4NzI=","number":2556,"title":"Better DuplicateKeysError error to help the user debug the 
issue","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-28T13:50:57Z","updated_at":"2021-06-28T13:50:57Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As mentioned in https:\/\/github.com\/huggingface\/datasets\/issues\/2552 it would be nice to improve the error message when a dataset fails to build because there are duplicate example keys.\r\n\r\nThe current one is\r\n```python\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 48\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\nand we could have something that guides the user to debugging the issue:\r\n```python\r\nDuplicateKeysError: both 42th and 1337th examples have the same keys `48`.\r\nPlease fix the dataset script at \r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2555\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555","id":931585485,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc5MDU4ODM3","number":2555,"title":"Fix code_search_net 
keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-28T13:40:23Z","updated_at":"2021-06-28T14:10:35Z","closed_at":"2021-06-28T14:10:35Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2555","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2555.patch"},"body":"There were duplicate keys in the `code_search_net` dataset, as reported in https:\/\/github.com\/huggingface\/datasets\/issues\/2552\r\n\r\nI fixed the keys (it was an addition of the file and row indices, which was causing collisions)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2554\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2554","id":931453855,"node_id":"MDU6SXNzdWU5MzE0NTM4NTU=","number":2554,"title":"Multilabel metrics not 
supported","user":{"login":"GuillemGSubies","id":37592763,"node_id":"MDQ6VXNlcjM3NTkyNzYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37592763?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GuillemGSubies","html_url":"https:\/\/github.com\/GuillemGSubies","followers_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/followers","following_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/orgs","repos_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/repos","events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-28T11:09:46Z","updated_at":"2021-07-08T08:40:15Z","closed_at":"2021-07-08T08:40:15Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"When I try to use a metric like F1 macro I get the following error:\r\n\r\n```\r\nTypeError: int() argument must be a string, a bytes-like object or a number, not 'list'\r\n```\r\nThere is an explicit casting here:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/fc79f61cbbcfa0e8c68b28c0a8257f17e768a075\/src\/datasets\/features.py#L274\r\n\r\nAnd looks like this is because here\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/fc79f61cbbcfa0e8c68b28c0a8257f17e768a075\/metrics\/f1\/f1.py#L88\r\n\r\nthe features can only be integers, so we cannot use that F1 for multilabel. 
Instead, if I create the following F1 (ints replaced with sequence of ints), it will work:\r\n\r\n```python\r\nclass F1(datasets.Metric):\r\n def _info(self):\r\n return datasets.MetricInfo(\r\n description=_DESCRIPTION,\r\n citation=_CITATION,\r\n inputs_description=_KWARGS_DESCRIPTION,\r\n features=datasets.Features(\r\n {\r\n \"predictions\": datasets.Sequence(datasets.Value(\"int32\")),\r\n \"references\": datasets.Sequence(datasets.Value(\"int32\")),\r\n }\r\n ),\r\n reference_urls=[\"https:\/\/scikit-learn.org\/stable\/modules\/generated\/sklearn.metrics.f1_score.html\"],\r\n )\r\n\r\n def _compute(self, predictions, references, labels=None, pos_label=1, average=\"binary\", sample_weight=None):\r\n return {\r\n \"f1\": f1_score(\r\n references,\r\n predictions,\r\n labels=labels,\r\n pos_label=pos_label,\r\n average=average,\r\n sample_weight=sample_weight,\r\n ),\r\n }\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2553\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2553","id":931365926,"node_id":"MDU6SXNzdWU5MzEzNjU5MjY=","number":2553,"title":"load_dataset(\"web_nlg\") NonMatchingChecksumError","user":{"login":"alexandrethm","id":33730312,"node_id":"MDQ6VXNlcjMzNzMwMzEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33730312?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexandrethm","html_url":"https:\/\/github.com\/alexandrethm","followers_url":"https:\/\/api.github.com\/users\/alexandrethm\/followers","following_url":"https:\/\/api.github.com\/users\/alexandrethm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexandrethm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexandrethm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexandrethm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexandrethm\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexandrethm\/repos","events_url":"https:\/\/api.github.com\/users\/alexandrethm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexandrethm\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-06-28T09:26:46Z","updated_at":"2021-06-28T17:23:39Z","closed_at":"2021-06-28T17:23:16Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi! 
It seems the WebNLG dataset gives a NonMatchingChecksumError.\r\n\r\n## Steps to reproduce the bug\r\n\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('web_nlg', name=\"release_v3.0_en\", split=\"dev\")\r\n```\r\n\r\nGives\r\n\r\n```\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/gitlab.com\/shimorina\/webnlg-dataset\/-\/archive\/master\/webnlg-dataset-master.zip']\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: macOS-11.3.1-x86_64-i386-64bit\r\n- Python version: 3.9.4\r\n- PyArrow version: 3.0.0\r\n\r\nAlso tested on Linux, with python 3.6.8","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2552\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2552","id":931354687,"node_id":"MDU6SXNzdWU5MzEzNTQ2ODc=","number":2552,"title":"Keys should be unique error on code_search_net","user":{"login":"thomwolf","id":7353373,"node_id":"MDQ6VXNlcjczNTMzNzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7353373?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/thomwolf","html_url":"https:\/\/github.com\/thomwolf","followers_url":"https:\/\/api.github.com\/users\/thomwolf\/followers","following_url":"https:\/\/api.github.com\/users\/thomwolf\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/thomwolf\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/thomwolf\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/thomwolf\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/thomwolf\/orgs","repos_url":"https:\/\/api.github.com\/users\/thomwolf\/repos","events_url":"https:\/\/api.github.com\/users\/thomwolf\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/thomwolf\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-28T09:15:20Z","updated_at":"2021-06-28T14:27:42Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nLoading `code_search_net` seems not possible at the moment.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n>>> load_dataset('code_search_net')\r\nDownloading: 8.50kB [00:00, 3.09MB\/s] \r\nDownloading: 19.1kB [00:00, 10.1MB\/s] \r\nNo config specified, defaulting to: code_search_net\/all\r\nDownloading and preparing dataset code_search_net\/all (download: 4.77 GiB, generated: 5.99 GiB, post-processed: Unknown size, total: 10.76 GiB) to \/Users\/thomwolf\/.cache\/huggingface\/datasets\/code_search_net\/all\/1.0.0\/b3e8278faf5d67da1d06981efbeac3b76a2900693bd2239bbca7a4a3b0d6e52a...\r\nTraceback (most recent call last): \r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/datasets\/builder.py\", line 1067, in 
_prepare_split\r\n writer.write(example, key)\r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/datasets\/arrow_writer.py\", line 343, in write\r\n self.check_duplicate_keys()\r\n File \"\/Users\/thomwolf\/Documents\/GitHub\/datasets\/src\/datasets\/arrow_writer.py\", line 354, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 48\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.1.dev0\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n- Python version: 3.8.5\r\n- PyArrow version: 2.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2551\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551","id":930967978,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc4NTQzMjg1","number":2551,"title":"Fix FileSystems documentation","user":{"login":"connor-mccarthy","id":55268212,"node_id":"MDQ6VXNlcjU1MjY4MjEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55268212?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/connor-mccarthy","html_url":"https:\/\/github.com\/connor-mccarthy","followers_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/followers","following_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/orgs","repos_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/repos","events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/connor-mccarthy\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-27T16:18:42Z","updated_at":"2021-06-28T13:09:55Z","closed_at":"2021-06-28T13:09:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2551","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2551.patch"},"body":"### What this fixes:\r\nThis PR resolves several issues I discovered in the documentation on the `datasets.filesystems` module ([this page](https:\/\/huggingface.co\/docs\/datasets\/filesystems.html)).\r\n\r\n### What were the issues?\r\nWhen I originally tried implementing the code examples I faced several bugs attributed to:\r\n\r\n- out of date [botocore](https:\/\/github.com\/boto\/botocore) call signatures\r\n- capitalization errors in the `S3FileSystem` class name (written as `S3Filesystem` in one place)\r\n- call signature errors for the 
`S3FileSystem` class constructor (uses parameter `sessions` instead of `session` in some places) (see [`s3fs`](https:\/\/s3fs.readthedocs.io\/en\/latest\/api.html#s3fs.core.S3FileSystem) for where this constructor signature is defined)\r\n\r\n### Testing\/reviewing notes\r\nInstructions for generating the documentation locally: [here](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/docs#generating-the-documentation).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2550\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2550","id":930951287,"node_id":"MDU6SXNzdWU5MzA5NTEyODc=","number":2550,"title":"Allow for incremental cumulative metric updates in a distributed setup","user":{"login":"eladsegal","id":13485709,"node_id":"MDQ6VXNlcjEzNDg1NzA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13485709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eladsegal","html_url":"https:\/\/github.com\/eladsegal","followers_url":"https:\/\/api.github.com\/users\/eladsegal\/followers","following_url":"https:\/\/api.github.com\/users\/eladsegal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eladsegal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eladsegal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eladsegal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eladsegal\/orgs","repos_url":"https:\/\/api.github.com\/users\/eladsegal\/repos","events_url":"https:\/\/api.github.com\/users\/eladsegal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eladsegal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-27T15:00:58Z","updated_at":"2021-06-27T17:16:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Currently, using a metric allows for one of the following:\r\n- Per example\/batch metrics\r\n- Cumulative metrics over the whole data\r\n\r\nWhat I'd like is to have an efficient way to get cumulative metrics over the examples\/batches added so far, in order to display it as part of the progress bar during training\/evaluation.\r\n\r\nSince most metrics are just an average of per-example metrics (which aren't?), an efficient calculation can be done as follows:\r\n`((score_cumulative * n_cumulative) + (score_new * n_new)) \/ (n_cumulative+ n_new)`\r\nwhere `n` and `score` refer to number of examples and metric score, `cumulative` refers to the cumulative metric and `new` refers to the addition of new examples.\r\n\r\nIf you don't want to add this capability in the library, a simple solution exists so users can do it themselves:\r\nIt is easy to implement for a single process setup, but in a distributed one there is no way 
to get the correct `n_new`.\r\nThe solution for this is to return the number of examples that was used to compute the metrics in `.compute()` by adding the following line here:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/5a3221785311d0ce86c2785b765e86bd6997d516\/src\/datasets\/metric.py#L402-L403\r\n```\r\noutput[\"number_of_examples\"] = len(predictions)\r\n```\r\nand also remove the log message here so it won't spam:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/3db67f5ff6cbf807b129d2b4d1107af27623b608\/src\/datasets\/metric.py#L411\r\n\r\nIf this change is ok with you, I'll open a pull request.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2549\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2549","id":929819093,"node_id":"MDU6SXNzdWU5Mjk4MTkwOTM=","number":2549,"title":"Handling unlabeled datasets","user":{"login":"nelson-liu","id":7272031,"node_id":"MDQ6VXNlcjcyNzIwMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7272031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nelson-liu","html_url":"https:\/\/github.com\/nelson-liu","followers_url":"https:\/\/api.github.com\/users\/nelson-liu\/followers","following_url":"https:\/\/api.github.com\/users\/nelson-liu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nelson-liu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nelson-liu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nelson-liu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nelson-liu\/orgs","repos_url":"https:\/\/api.github.com\/users\/nelson-liu\/repos","events_url":"https:\/\/api.github.com\/users\/nelson-liu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nelson-liu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-25T04:32:23Z","updated_at":"2021-06-25T21:07:57Z","closed_at":"2021-06-25T21:07:56Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi!\r\n\r\nIs there a way for datasets to produce unlabeled instances (e.g., the `ClassLabel` can be nullable).\r\n\r\nFor example, I want to use the MNLI dataset reader ( https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/multi_nli\/multi_nli.py ) on a file that doesn't have the `gold_label` field. 
I tried setting `\"label\": data.get(\"gold_label\")`, but got the following error:\r\n\r\n```\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 748, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 989, in _prepare_split\r\n example = self.info.features.encode_example(record)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 953, in encode_example\r\n return encode_nested_example(self, example)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 848, in encode_nested_example\r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 848, in \r\n k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 875, in encode_nested_example\r\n return schema.encode_example(obj)\r\n File \"\/home\/nfliu\/miniconda3\/envs\/debias\/lib\/python3.7\/site-packages\/datasets\/features.py\", line 653, in encode_example\r\n if not -1 <= example_data < self.num_classes:\r\nTypeError: '<=' not supported between instances of 'int' and 'NoneType'\r\n```\r\n\r\nWhat's the proper way to handle reading unlabeled datasets, especially for downstream usage with Transformers?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2548\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2548","id":929232831,"node_id":"MDU6SXNzdWU5MjkyMzI4MzE=","number":2548,"title":"Field order issue in loading 
json","user":{"login":"luyug","id":55288513,"node_id":"MDQ6VXNlcjU1Mjg4NTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55288513?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/luyug","html_url":"https:\/\/github.com\/luyug","followers_url":"https:\/\/api.github.com\/users\/luyug\/followers","following_url":"https:\/\/api.github.com\/users\/luyug\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/luyug\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/luyug\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/luyug\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/luyug\/orgs","repos_url":"https:\/\/api.github.com\/users\/luyug\/repos","events_url":"https:\/\/api.github.com\/users\/luyug\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/luyug\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-24T13:29:53Z","updated_at":"2021-06-24T14:36:43Z","closed_at":"2021-06-24T14:34:05Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe `load_dataset` function expects columns in alphabetical order when loading json files.\r\n\r\nSimilar bug was previously reported for csv in #623 and fixed in #684.\r\n## Steps to reproduce the bug\r\n\r\nFor a json file `j.json`,\r\n```\r\n{\"c\":321, \"a\": 1, \"b\": 2}\r\n```\r\nRunning the following,\r\n```\r\nf= datasets.Features({'a': Value('int32'), 'b': Value('int32'), 'c': Value('int32')})\r\njson_data = datasets.load_dataset('json', data_files='j.json', features=f)\r\n```\r\n\r\n\r\n## Expected results\r\nA successful load.\r\n## Actual results\r\n```\r\nFile \"pyarrow\/table.pxi\", line 1409, in pyarrow.lib.Table.cast\r\nValueError: Target schema's field names are not matching the table's field names: ['c', 'a', 'b'], ['a', 'b', 'c']\r\n```\r\n\r\n## Environment info\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-3.10.0-957.1.3.el7.x86_64-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyArrow version: 3.0.0\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2547\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2547","id":929192329,"node_id":"MDU6SXNzdWU5MjkxOTIzMjk=","number":2547,"title":"Dataset load_from_disk is too 
slow","user":{"login":"alexvaca0","id":35173563,"node_id":"MDQ6VXNlcjM1MTczNTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35173563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexvaca0","html_url":"https:\/\/github.com\/alexvaca0","followers_url":"https:\/\/api.github.com\/users\/alexvaca0\/followers","following_url":"https:\/\/api.github.com\/users\/alexvaca0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexvaca0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexvaca0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexvaca0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexvaca0\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexvaca0\/repos","events_url":"https:\/\/api.github.com\/users\/alexvaca0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexvaca0\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-24T12:45:44Z","updated_at":"2021-06-25T14:56:38Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"@lhoestq \r\n## Describe the bug\r\nIt's not normal that I have to wait 7-8 hours for a dataset to be loaded from disk, as there are no preprocessing steps, it's only loading it with load_from_disk. I have 96 cpus, however only 1 is used for this, which is inefficient. Moreover, its usage is at 1%... This is happening in the context of a language model training, therefore I'm wasting 100$ each time I have to load the dataset from disk again (because the spot instance was stopped by aws and I need to relaunch it for example). \r\n\r\n## Steps to reproduce the bug\r\nJust get the oscar in spanish (around 150GGB) and try to first save in disk and then load the processed dataset. It's not dependent on the task you're doing, it just depends on the size of the text dataset.\r\n\r\n## Expected results\r\nI expect the dataset to be loaded in a normal time, by using the whole machine for loading it, I mean if you store the dataset in multiple files (.arrow) and then load it from multiple files, you can use multiprocessing for that and therefore don't waste so much time. \r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Ubuntu 18\r\n- Python version: 3.8\r\n\r\n\r\nI've seen you're planning to include a streaming mode for load_dataset, but that only saves the downloading and processing time, that's not being a problem for me, you cannot save the pure loading from disk time, therefore that's not a solution for my use case or for anyone who wants to use your library for training a language model. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2546\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546","id":929091689,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2OTk2MjQ0","number":2546,"title":"Add license to the Cambridge English Write & Improve + LOCNESS dataset card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-24T10:39:29Z","updated_at":"2021-06-24T10:52:01Z","closed_at":"2021-06-24T10:52:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2546","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2546.patch"},"body":"As noticed in https:\/\/github.com\/huggingface\/datasets\/pull\/2539, the licensing information was missing for this dataset.\r\n\r\nI added it and I also filled a few other empty sections.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2545\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545","id":929016580,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2OTMxOTYw","number":2545,"title":"Fix DuplicatedKeysError in drop 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-24T09:10:39Z","updated_at":"2021-06-24T14:57:08Z","closed_at":"2021-06-24T14:57:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2545","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2545.patch"},"body":"Close #2542.\r\n\r\ncc: @VictorSanh.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2544\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544","id":928900827,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2ODM1MjYz","number":2544,"title":"Fix logging 
levels","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-24T06:41:36Z","updated_at":"2021-06-25T13:40:19Z","closed_at":"2021-06-25T13:40:19Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2544","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2544.patch"},"body":"Sometimes default `datasets` logging can be too verbose. One approach could be reducing some logging levels, from info to debug, or from warning to info.\r\n\r\nClose #2543.\r\n\r\ncc: @stas00 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2543\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2543","id":928571915,"node_id":"MDU6SXNzdWU5Mjg1NzE5MTU=","number":2543,"title":"switching some low-level log.info's to 
log.debug?","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-23T19:26:55Z","updated_at":"2021-06-25T13:40:19Z","closed_at":"2021-06-25T13:40:19Z","author_association":"CONTRIBUTOR","active_lock_reaso
n":null,"pull_request":null,"body":"In https:\/\/github.com\/huggingface\/transformers\/pull\/12276 we are now changing the examples to have `datasets` on the same log level as `transformers`, so that one setting can do a consistent logging across all involved components.\r\n\r\nThe trouble is that now we get a ton of these:\r\n\r\n```\r\n06\/23\/2021 12:15:31 - INFO - datasets.utils.filelock - Lock 139627640431136 acquired on \/home\/stas\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.lock\r\n06\/23\/2021 12:15:31 - INFO - datasets.arrow_writer - Done writing 50 examples in 12280 bytes \/home\/stas\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.\r\n06\/23\/2021 12:15:31 - INFO - datasets.arrow_dataset - Set __getitem__(key) output type to python objects for no columns (when key is int or slice) and don't output other (un-formatted) columns.\r\n06\/23\/2021 12:15:31 - INFO - datasets.utils.filelock - Lock 139627640431136 released on \/home\/stas\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.lock\r\n```\r\n\r\nMay I suggest that these can be `log.debug` as it's no informative to the user.\r\n\r\nMore examples: these are not informative - too much information:\r\n```\r\n06\/23\/2021 12:14:26 - INFO - datasets.load - Checking \/home\/stas\/.cache\/huggingface\/datasets\/downloads\/459933f1fe47711fad2f6ff8110014ff189120b45ad159ef5b8e90ea43a174fa.e23e7d1259a8c6274a82a42a8936dd1b87225302c6dc9b7261beb3bc2daac640.py for additional imports.\r\n06\/23\/2021 12:14:27 - INFO - datasets.builder - Constructing Dataset for split train, validation, test, from \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a\r\n\r\n```\r\n\r\nWhile these are:\r\n```\r\n06\/23\/2021 12:14:27 - INFO - datasets.info - Loading Dataset Infos from \/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt16\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a\r\n06\/23\/2021 12:14:27 - WARNING - datasets.builder - Reusing dataset wmt16 (\/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a)\r\n```\r\n\r\nI also realize that `transformers` examples don't have do use `info` for `datasets` to let the default `warning` keep logging to less noisy.\r\n\r\nBut I think currently the log levels are slightly misused and skewed by 1 level. Many `warnings` will better be `info`s and most `info`s be `debug`.\r\n\r\ne.g.:\r\n\r\n```\r\n06\/23\/2021 12:14:27 - WARNING - datasets.builder - Reusing dataset wmt16 (\/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/0d9fb3e814712c785176ad8cdb9f465fbe6479000ee6546725db30ad8a8b5f8a)\r\n```\r\n\r\nwhy is this a warning? it is informing me that the cache is used, there is nothing to be worried about. I'd have it as `info`.\r\n\r\nWarnings are typically something that's bordering error or the first thing to check when things don't work as expected.\r\n\r\ninfrequent info is there to inform of the different stages or important events.\r\n\r\nEverything else is debug.\r\n\r\nAt least the way I understand things. 
\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2542\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2542","id":928540382,"node_id":"MDU6SXNzdWU5Mjg1NDAzODI=","number":2542,"title":"`datasets.keyhash.DuplicatedKeysError` for `drop` and `adversarial_qa\/adversarialQA`","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-06-23T18:41:16Z","updated_at":"2021-06-25T21:50:05Z","closed_at":"2021-06-24T14:57:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nFailure to generate the datasets (`drop` and subset `adversarialQA` from `adversarial_qa`) because of duplicate keys.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\nload_dataset(\"drop\")\r\nload_dataset(\"adversarial_qa\", \"adversarialQA\")\r\n```\r\n\r\n## Expected results\r\nThe examples keys should be unique.\r\n\r\n## Actual results\r\n```bash\r\n>>> load_dataset(\"drop\")\r\nUsing custom data configuration default\r\nDownloading and preparing dataset drop\/default (download: 7.92 MiB, generated: 111.88 MiB, post-processed: Unknown size, total: 119.80 MiB) to \/home\/hf\/.cache\/huggingface\/datasets\/drop\/default\/0.1.0\/7a94f1e2bb26c4b5c75f89857c06982967d7416e5af935a9374b9bccf5068026...\r\nTraceback (most recent call last): \r\n File \"\", line 1, in \r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 751, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, 
verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 992, in _prepare_split\r\n num_examples, num_bytes = writer.finalize()\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 409, in finalize\r\n self.check_duplicate_keys()\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 349, in check_duplicate_keys\r\n raise DuplicatedKeysError(key)\r\ndatasets.keyhash.DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 28553293-d719-441b-8f00-ce3dc6df5398\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.0-1044-gcp-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2541\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541","id":928529078,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2NTIwNDgx","number":2541,"title":"update discofuse link cc @ekQ","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-23T18:24:58Z","updated_at":"2021-06-28T14:34:51Z","closed_at":"2021-06-28T14:34:50Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2541","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2541.patch"},"body":"Updating the discofuse link: 
https:\/\/github.com\/google-research-datasets\/discofuse\/commit\/fd4b120cb3dd19a417e7f3b5432010b574b5eeee","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2540\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540","id":928433892,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2NDM5NTM1","number":2540,"title":"Remove task templates if required features are removed during `Dataset.map`","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-23T16:20:25Z","updated_at":"2021-06-24T14:41:15Z","closed_at":"2021-06-24T13:34:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2540","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2540.patch"},"body":"This PR fixes a bug reported by @craffel where removing a dataset's columns during `Dataset.map` triggered a `KeyError` because the `TextClassification` template tried to access the removed columns during `DatasetInfo.__post_init__`:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# `yelp_polarity` comes with a `TextClassification` template\r\nds = load_dataset(\"yelp_polarity\", split=\"test\")\r\nds\r\n# Dataset({\r\n# features: ['text', 'label'],\r\n# num_rows: 38000\r\n# })\r\n\r\n# Triggers KeyError: 'label' - oh noes!\r\nds.map(lambda x: {\"inputs\": 0}, remove_columns=ds.column_names)\r\n```\r\n\r\nI wrote a unit test to make sure I could reproduce the error and then patched a fix.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2539\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539","id":927952429,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc2MDI5MDY5","number":2539,"title":"remove wi_locness dataset due to licensing issues","user":{"login":"aseifert","id":4944799,"node_id":"MDQ6VXNlcjQ5NDQ3OTk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4944799?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aseifert","html_url":"https:\/\/github.com\/aseifert","followers_url":"https:\/\/api.github.com\/users\/aseifert\/followers","following_url":"https:\/\/api.github.com\/users\/aseifert\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aseifert\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aseifert\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aseifert\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aseifert\/orgs","repos_url":"https:\/\/api.github.com\/users\/aseifert\/repos","events_url":"https:\/\/api.github.com\/users\/aseifert\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aseifert\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-23T07:35:32Z","updated_at":"2021-06-25T14:52:42Z","closed_at":"2021-06-25T14:52:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2539","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2539.patch"},"body":"It was brought to my attention that this dataset's license is not only missing, but also prohibits redistribution. 
I contacted the original author to apologize for this oversight and asked if we could still use it, but unfortunately we can't and the author kindly asked to take down this dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2538\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2538","id":927940691,"node_id":"MDU6SXNzdWU5Mjc5NDA2OTE=","number":2538,"title":"Loading partial dataset when debugging","user":{"login":"reachtarunhere","id":9061913,"node_id":"MDQ6VXNlcjkwNjE5MTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9061913?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/reachtarunhere","html_url":"https:\/\/github.com\/reachtarunhere","followers_url":"https:\/\/api.github.com\/users\/reachtarunhere\/followers","following_url":"https:\/\/api.github.com\/users\/reachtarunhere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/reachtarunhere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/reachtarunhere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/reachtarunhere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/reachtarunhere\/orgs","repos_url":"https:\/\/api.github.com\/users\/reachtarunhere\/repos","events_url":"https:\/\/api.github.com\/users\/reachtarunhere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/reachtarunhere\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-23T07:19:52Z","updated_at":"2021-07-29T14:10:33Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am using PyTorch Lightning along with datasets (thanks for so many datasets already prepared and the great splits). \r\n\r\nEvery time I execute load_dataset for the imdb dataset it takes some time even if I specify a split involving very few samples. I guess this due to hashing as per the other issues.\r\n\r\nIs there a way to only load part of the dataset on load_dataset? This would really speed up my workflow.\r\nSomething like a debug mode would really help. 
Thanks!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2537\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537","id":927472659,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1NjI1OTY3","number":2537,"title":"Add Parquet loader + from_parquet and to_parquet","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-22T17:28:23Z","updated_at":"2021-06-30T16:31:03Z","closed_at":"2021-06-30T16:30:58Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2537","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2537.patch"},"body":"Continuation of #2247 \r\n\r\nI added a \"parquet\" dataset builder, as well as the methods `Dataset.from_parquet` and `Dataset.to_parquet`.\r\nAs usual, the data are converted to arrow in a batched way to avoid loading everything in memory.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2536\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2536","id":927338639,"node_id":"MDU6SXNzdWU5MjczMzg2Mzk=","number":2536,"title":"Use `Audio` features for `AutomaticSpeechRecognition` task 
template","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-22T15:07:21Z","updated_at":"2021-06-22T15:16:51Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"In #2533 we added a task template for speech recognition that relies on the file paths to the audio files. As pointed out by @SBrandeis this is brittle as it doesn't port easily across different OS'. 
\r\n\r\nThe solution is to use dedicated `Audio` features when casting the dataset. These features are not yet available in `datasets`, but should be included in the `AutomaticSpeechRecognition` template once they are.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2535\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535","id":927334349,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1NTA3MTAw","number":2535,"title":"Improve Features docs","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-22T15:03:27Z","updated_at":"2021-06-23T13:40:43Z","closed_at":"2021-06-23T13:40:43Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2535","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2535.patch"},"body":"- Fix rendering and cross-references in Features docs\r\n- Add docstrings to Features methods","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2534\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534","id":927201435,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1MzkzODg0","number":2534,"title":"Sync with transformers disabling 
NOTSET","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-22T12:54:21Z","updated_at":"2021-06-24T14:42:47Z","closed_at":"2021-06-24T14:42:47Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2534","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2534.patch"},"body":"Close #2528.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2533\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533","id":927193264,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1Mzg2OTMw","number":2533,"title":"Add task template for automatic speech 
recognition","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-22T12:45:02Z","updated_at":"2021-06-23T16:14:46Z","closed_at":"2021-06-23T15:56:57Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2533","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2533.patch"},"body":"This PR adds a task template for automatic speech recognition. In this task, the input is a path to an audio file which the model consumes to produce a transcription.\r\n\r\nUsage:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom datasets.tasks import AutomaticSpeechRecognition\r\n\r\nds = load_dataset(\"timit_asr\", split=\"train[:10]\")\r\n# Dataset({\r\n# features: ['file', 'text', 'phonetic_detail', 'word_detail', 'dialect_region', 'sentence_type', 'speaker_id', 'id'],\r\n# num_rows: 10\r\n# })\r\n\r\ntask = AutomaticSpeechRecognition(audio_file_column=\"file\", transcription_column=\"text\")\r\nds.prepare_for_task(task)\r\n# Dataset({\r\n# features: ['audio_file', 'transcription'],\r\n# num_rows: 10\r\n# })\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2532\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2532","id":927063196,"node_id":"MDU6SXNzdWU5MjcwNjMxOTY=","number":2532,"title":"Tokenizer's normalization preprocessor cause misalignment in return_offsets_mapping for tokenizer classification 
task","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-22T10:08:18Z","updated_at":"2021-06-23T05:17:25Z","closed_at":"2021-06-23T05:17:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"[This colab notebook](https:\/\/colab.research.google.com\/drive\/151gKyo0YIwnlznrOHst23oYH_a3mAe3Z?usp=sharing) implements a token classification input pipeline extending the logic from [this hugging example](https:\/\/huggingface.co\/transformers\/custom_datasets.html#tok-ner).\r\n\r\nThe pipeline works fine with most instance in different languages, but unfortunately, [the Japanese Kana ligature (a form of abbreviation? I don't know Japanese well)](https:\/\/en.wikipedia.org\/wiki\/Kana_ligature) break the alignment of `return_offsets_mapping`:\r\n![image](https:\/\/user-images.githubusercontent.com\/50871412\/122904371-db192700-d382-11eb-8917-1775db76db69.png)\r\n\r\nWithout the try catch block, it riase `ValueError: NumPy boolean array indexing assignment cannot assign 88 input values to the 87 output values where the mask is true`, example shown here [(another colab notebook)](https:\/\/colab.research.google.com\/drive\/1MmOqf3ppzzdKKyMWkn0bJy6DqzOO0SSm?usp=sharing)\r\n\r\nIt is clear that the normalizer is the process that break the alignment, as it is observed that `tokenizer._tokenizer.normalizer.normalize_str('\u30ff')` return '\u30b3\u30c8'.\r\n\r\nOne workaround is to include `tokenizer._tokenizer.normalizer.normalize_str` before the tokenizer preprocessing pipeline, which is also provided in the [first colab notebook](https:\/\/colab.research.google.com\/drive\/151gKyo0YIwnlznrOHst23oYH_a3mAe3Z?usp=sharing) with the name `udposTestDatasetWorkaround`.\r\n\r\nI guess similar logics should be included inside the tokenizer and the offsets_mapping generation process such that user don't need to include them in their code. 
But I don't understand the code of tokenizer well that I think I am not able to do this.\r\n\r\np.s.\r\n**I am using my own dataset building script in the provided example, but the script should be equivalent to the changes made by this [update](https:\/\/github.com\/huggingface\/datasets\/pull\/2466)**\r\n`get_dataset `is just a simple wrapping for `load_dataset`\r\nand the `tokenizer` is just `XLMRobertaTokenizerFast.from_pretrained(\"xlm-roberta-large\")`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2531\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531","id":927017924,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1MjM2MDYz","number":2531,"title":"Fix dev version","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-22T09:17:10Z","updated_at":"2021-06-22T09:47:10Z","closed_at":"2021-06-22T09:47:09Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2531","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2531.patch"},"body":"The dev version that ends in `.dev0` should be greater than the current version.\r\nHowever it happens that `1.8.0 > 1.8.0.dev0` for example.\r\nTherefore we need to use `1.8.1.dev0` for example in this case.\r\n\r\nI updated the dev version to use `1.8.1.dev0`, and I also added a comment in the setup.py in the release steps about this.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2530\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530","id":927013773,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc1MjMyNDk0","number":2530,"title":"Fixed label parsing in the ProductReviews dataset","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-22T09:12:45Z","updated_at":"2021-06-22T12:55:20Z","closed_at":"2021-06-22T12:52:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2530","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2530.patch"},"body":"Fixed issue with parsing dataset labels. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2529\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529","id":926378812,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0NjkxNjA5","number":2529,"title":"Add summarization template","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-21T16:08:31Z","updated_at":"2021-06-23T14:22:11Z","closed_at":"2021-06-23T13:30:10Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2529","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2529.patch"},"body":"This PR adds a task template for text summarization. 
As far as I can tell, we do not need to distinguish between \"extractive\" or \"abstractive\" summarization - both can be handled with this template.\r\n\r\nUsage:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom datasets.tasks import Summarization\r\n\r\nds = load_dataset(\"xsum\", split=\"train\")\r\n# Dataset({\r\n# features: ['document', 'summary', 'id'],\r\n# num_rows: 204045\r\n# })\r\n\r\nsummarization = Summarization(text_column=\"document\", summary_column=\"summary\")\r\nds.prepare_for_task(summarization)\r\n# Dataset({\r\n# features: ['text', 'summary'],\r\n# num_rows: 204045\r\n# })\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2528\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2528","id":926314656,"node_id":"MDU6SXNzdWU5MjYzMTQ2NTY=","number":2528,"title":"Logging cannot be set to NOTSET similar to transformers","user":{"login":"joshzwiebel","id":34662010,"node_id":"MDQ6VXNlcjM0NjYyMDEw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34662010?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/joshzwiebel","html_url":"https:\/\/github.com\/joshzwiebel","followers_url":"https:\/\/api.github.com\/users\/joshzwiebel\/followers","following_url":"https:\/\/api.github.com\/users\/joshzwiebel\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/joshzwiebel\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/joshzwiebel\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/joshzwiebel\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/joshzwiebel\/orgs","repos_url":"https:\/\/api.github.com\/users\/joshzwiebel\/repos","events_url":"https:\/\/api.github.com\/users\/joshzwiebel\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/joshzwiebel\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-21T15:04:54Z","updated_at":"2021-06-24T14:42:47Z","closed_at":"2021-06-24T14:42:47Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nIn the transformers library you can set the verbosity level to logging.NOTSET to work around the usage of tqdm and IPywidgets, however in Datasets this is no longer possible. 
This is because transformers set the verbosity level of tqdm with [this](https:\/\/github.com\/huggingface\/transformers\/blob\/b53bc55ba9bb10d5ee279eab51a2f0acc5af2a6b\/src\/transformers\/file_utils.py#L1449) \r\n`disable=bool(logging.get_verbosity() == logging.NOTSET)`\r\nand datasets accomplishes this like [so](https:\/\/github.com\/huggingface\/datasets\/blob\/83554e410e1ab8c6f705cfbb2df7953638ad3ac1\/src\/datasets\/utils\/file_utils.py#L493)\r\n`not_verbose = bool(logger.getEffectiveLevel() > WARNING)`\r\n## Steps to reproduce the bug\r\n```python\r\nimport datasets\r\nimport logging\r\ndatasets.logging.get_verbosity = lambda : logging.NOTSET\r\ndatasets.load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\")\r\n```\r\n\r\n## Expected results\r\nThe code should download and load the dataset as normal without displaying progress bars\r\n\r\n## Actual results\r\n```ImportError Traceback (most recent call last)\r\n in \r\n----> 1 datasets.load_dataset(\"patrickvonplaten\/librispeech_asr_dummy\")\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, **config_kwargs)\r\n 713 dataset=True,\r\n 714 return_resolved_file_path=True,\r\n--> 715 use_auth_token=use_auth_token,\r\n 716 )\r\n 717 # Set the base path for downloads as the parent of the script location\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/load.py in prepare_module(path, script_version, download_config, download_mode, dataset, force_local_path, dynamic_modules_path, return_resolved_file_path, **download_kwargs)\r\n 350 file_path = hf_bucket_url(path, filename=name, dataset=False)\r\n 351 try:\r\n--> 352 local_path = cached_path(file_path, download_config=download_config)\r\n 353 except FileNotFoundError:\r\n 354 raise FileNotFoundError(\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py in cached_path(url_or_filename, download_config, **download_kwargs)\r\n 289 use_etag=download_config.use_etag,\r\n 290 max_retries=download_config.max_retries,\r\n--> 291 use_auth_token=download_config.use_auth_token,\r\n 292 )\r\n 293 elif os.path.exists(url_or_filename):\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n 668 headers=headers,\r\n 669 cookies=cookies,\r\n--> 670 max_retries=max_retries,\r\n 671 )\r\n 672 \r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py in http_get(url, temp_file, proxies, resume_size, headers, cookies, timeout, max_retries)\r\n 493 initial=resume_size,\r\n 494 desc=\"Downloading\",\r\n--> 495 disable=not_verbose,\r\n 496 )\r\n 497 for chunk in response.iter_content(chunk_size=1024):\r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/tqdm\/notebook.py in __init__(self, *args, **kwargs)\r\n 217 total = self.total * unit_scale if self.total else self.total\r\n 218 self.container = self.status_printer(\r\n--> 219 self.fp, total, self.desc, self.ncols)\r\n 220 self.sp = self.display\r\n 221 \r\n\r\n~\/venv\/lib\/python3.7\/site-packages\/tqdm\/notebook.py in status_printer(_, total, desc, ncols)\r\n 95 if IProgress is None: # #187 #451 #558 #872\r\n 96 raise ImportError(\r\n---> 97 \"IProgress not found. 
Please update jupyter and ipywidgets.\"\r\n 98 \" See https:\/\/ipywidgets.readthedocs.io\/en\/stable\"\r\n 99 \"\/user_install.html\")\r\n\r\nImportError: IProgress not found. Please update jupyter and ipywidgets. See https:\/\/ipywidgets.readthedocs.io\/en\/stable\/user_install.html\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: 1.8.0\r\n- Platform: Linux-5.4.95-42.163.amzn2.x86_64-x86_64-with-debian-10.8\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\nI am running this code on Deepnote and which important to this issue **does not** support IPywidgets\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2527\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527","id":926031525,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0MzkzNjQ5","number":2527,"title":"Replace bad `n>1M` size tag","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-21T09:42:35Z","updated_at":"2021-06-21T15:06:50Z","closed_at":"2021-06-21T15:06:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2527","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2527.patch"},"body":"Some datasets were still using the old `n>1M` tag which has been replaced with tags `1M1M`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2526\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2526","id":925929228,"node_id":"MDU6SXNzdWU5MjU5MjkyMjg=","number":2526,"title":"Add COCO 
datasets","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-21T07:48:32Z","updated_at":"2021-06-21T07:48:32Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** COCO\r\n- **Description:** COCO is a large-scale object detection, segmentation, and captioning dataset.\r\n- **Paper + website:** https:\/\/cocodataset.org\/#home\r\n- **Data:** https:\/\/cocodataset.org\/#download\r\n- **Motivation:** It would be great to have COCO available in HuggingFace datasets, as we are moving beyond just text. COCO includes multi-modalities (images + text), as well as a huge amount of images annotated with objects, segmentation masks, keypoints etc., on which models like DETR (which I recently added to HuggingFace Transformers) are trained. 
Currently, one needs to download everything from the website and place it in a local folder, but it would be much easier if we can directly access it through the datasets API.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2525\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525","id":925896358,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0Mjc5MTgy","number":2525,"title":"Use scikit-learn package rather than sklearn in setup.py","user":{"login":"lesteve","id":1680079,"node_id":"MDQ6VXNlcjE2ODAwNzk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1680079?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lesteve","html_url":"https:\/\/github.com\/lesteve","followers_url":"https:\/\/api.github.com\/users\/lesteve\/followers","following_url":"https:\/\/api.github.com\/users\/lesteve\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lesteve\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lesteve\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lesteve\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lesteve\/orgs","repos_url":"https:\/\/api.github.com\/users\/lesteve\/repos","events_url":"https:\/\/api.github.com\/users\/lesteve\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lesteve\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-21T07:04:25Z","updated_at":"2021-06-21T10:01:13Z","closed_at":"2021-06-21T08:57:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2525","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2525.patch"},"body":"The sklearn package is an historical thing and should probably not be used by anyone, see https:\/\/github.com\/scikit-learn\/scikit-learn\/issues\/8215#issuecomment-344679114 for some caveats.\r\n\r\nNote: this affects only TESTS_REQUIRE so I guess only developers not end users.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2524\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524","id":925610934,"node_id":"MDExOlB1bGxSZXF1ZXN0Njc0MDQzNzk1","number":2524,"title":"Raise FileNotFoundError in 
WindowsFileLock","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-20T14:25:11Z","updated_at":"2021-06-28T09:56:22Z","closed_at":"2021-06-28T08:47:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2524","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2524.patch"},"body":"Closes #2443 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2523\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2523","id":925421008,"node_id":"MDU6SXNzdWU5MjU0MjEwMDg=","number":2523,"title":"Fr","user":{"login":"aDrIaNo34500","id":71971234,"node_id":"MDQ6VXNlcjcxOTcxMjM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/71971234?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aDrIaNo34500","html_url":"https:\/\/github.com\/aDrIaNo34500","followers_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/followers","following_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/orgs","repos_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/repos","events_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aDrIaNo34500\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-19T15:56:32Z","updated_at":"2021-06-19T18:48:23Z","closed_at":"2021-06-19T18:48:23Z","author_association":"NONE","active_lock_reason":null,"pull_request":n
ull,"body":"__Originally posted by @lewtun in https:\/\/github.com\/huggingface\/datasets\/pull\/2469__","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2522\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2522","id":925334379,"node_id":"MDU6SXNzdWU5MjUzMzQzNzk=","number":2522,"title":"Documentation Mistakes in Dataset: emotion","user":{"login":"GDGauravDutta","id":62606251,"node_id":"MDQ6VXNlcjYyNjA2MjUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/62606251?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GDGauravDutta","html_url":"https:\/\/github.com\/GDGauravDutta","followers_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/followers","following_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/orgs","repos_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/repos","events_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GDGauravDutta\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-19T07:08:57Z","updated_at":"2021-06-19T17:38:16Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"As per documentation,\r\nDataset: emotion\r\nHomepage: https:\/\/github.com\/dair-ai\/emotion_dataset\r\n\r\nDataset: https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/emotion\/emotion.py\r\n\r\nPermalink: https:\/\/huggingface.co\/datasets\/viewer\/?dataset=emotion\r\n\r\nEmotion is a dataset of English Twitter messages with eight basic emotions: anger, anticipation, disgust, fear, joy, sadness, surprise, and trust. 
For more detailed information please refer to the paper.\r\n\r\nBut when we view the data, there are only 6 emotions, anger, fear, joy, sadness, surprise, and trust.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2521\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521","id":925030685,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczNTgxNzQ4","number":2521,"title":"Insert text classification template for Emotion dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T15:56:19Z","updated_at":"2021-06-21T09:22:31Z","closed_at":"2021-06-21T09:22:31Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2521","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2521.patch"},"body":"This PR includes a template and updated `dataset_infos.json` for the `emotion` dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2520\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2520","id":925015004,"node_id":"MDU6SXNzdWU5MjUwMTUwMDQ=","number":2520,"title":"Datasets with tricky task 
templates","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the datasets"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T15:33:57Z","updated_at":"2021-06-18T15:46:26Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"I'm collecting a list of datasets here that don't follow the \"standard\" taxonomy and require further investigation to implement task templates for.\r\n\r\n## Text classification\r\n\r\n* [hatexplain](https:\/\/huggingface.co\/datasets\/hatexplain): ostensibly a form of text classification, but not in the standard `(text, target)` format and each sample appears to be tokenized.\r\n* [muchocine](https:\/\/huggingface.co\/datasets\/muchocine): contains two candidate text columns (long-form and summary) which in principle requires two `TextClassification` templates which is not currently supported ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2519\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519","id":924903240,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczNDcyMzYy","number":2519,"title":"Improve performance of pandas arrow 
extractor","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-18T13:24:41Z","updated_at":"2021-06-21T09:06:06Z","closed_at":"2021-06-21T09:06:06Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2519","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2519.patch"},"body":"While reviewing PR #2505, I noticed that pandas arrow extractor could be refactored to be faster.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2518\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518","id":924654100,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczMjU5Nzg1","number":2518,"title":"Add task templates for tydiqa and 
xquad","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-18T08:06:34Z","updated_at":"2021-06-18T15:01:17Z","closed_at":"2021-06-18T14:50:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2518","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2518.patch"},"body":"This PR adds question-answering templates to the remaining datasets that are linked to a model on the Hub.\r\n\r\nNotes: \r\n\r\n* I could not test the tydiqa implementation since I don't have enough disk space \ud83d\ude22 . But I am confident the template works :)\r\n* there exist other datasets like `fquad` and `mlqa` which are candidates for question-answering templates, but some work is needed to handle the ordering of nested column described in #2434 \r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2517\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517","id":924643345,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczMjUwODk1","number":2517,"title":"Fix typo in MatthewsCorrelation class 
name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T07:53:06Z","updated_at":"2021-06-18T08:43:55Z","closed_at":"2021-06-18T08:43:55Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2517","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2517.patch"},"body":"Close #2513.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2516\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2516","id":924597470,"node_id":"MDU6SXNzdWU5MjQ1OTc0NzA=","number":2516,"title":"datasets.map pickle issue resulting in invalid mapping 
function","user":{"login":"david-waterworth","id":5028974,"node_id":"MDQ6VXNlcjUwMjg5NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5028974?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/david-waterworth","html_url":"https:\/\/github.com\/david-waterworth","followers_url":"https:\/\/api.github.com\/users\/david-waterworth\/followers","following_url":"https:\/\/api.github.com\/users\/david-waterworth\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/david-waterworth\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/david-waterworth\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/david-waterworth\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/david-waterworth\/orgs","repos_url":"https:\/\/api.github.com\/users\/david-waterworth\/repos","events_url":"https:\/\/api.github.com\/users\/david-waterworth\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/david-waterworth\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-06-18T06:47:26Z","updated_at":"2021-06-23T13:47:49Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I trained my own tokenizer, and I needed to use a python custom class. Because of this I have to detach the custom step before saving and reattach after restore. I did this using the standard pickle `__get_state__` \/ `__set_state__` mechanism. I think it's correct but it fails when I use it inside a function which is mapped to a dataset, i.e. 
in the manner of run_mlm.py and other huggingface scripts.\r\n\r\nThe following reproduces the issue - most likely I'm missing something\r\n\r\nA simulated tokeniser which can be pickled\r\n\r\n```\r\nclass CustomTokenizer:\r\n def __init__(self):\r\n self.state = \"init\"\r\n\r\n def __getstate__(self):\r\n print(\"__getstate__ called\")\r\n out = self.__dict__.copy()\r\n self.state = \"pickled\"\r\n return out\r\n \r\n def __setstate__(self, d):\r\n print(\"__setstate__ called\")\r\n self.__dict__ = d\r\n self.state = \"restored\"\r\n\r\ntokenizer = CustomTokenizer()\r\n```\r\n\r\nTest that it actually works - prints \"__getstate__ called\" and \"__setstate__ called\"\r\n```\r\nimport pickle\r\nserialized = pickle.dumps(tokenizer)\r\nrestored = pickle.loads(serialized)\r\nassert restored.state == \"restored\"\r\n```\r\n\r\nSimulate a function that tokenises examples, when dataset.map is called, this function \r\n```\r\ndef tokenize_function(examples):\r\n assert tokenizer.state == \"restored\" # this shouldn't fail but it does\r\n output = tokenizer(examples) # this will fail as tokenizer isn't really a tokenizer\r\n return output\r\n```\r\n\r\nUse map to simulate tokenization\r\n```\r\nimport glob\r\nfrom datasets import load_dataset\r\n\r\nassert tokenizer.state == \"restored\"\r\ntrain_files = glob.glob('train*.csv')\r\nvalidation_files = glob.glob('validation*.csv')\r\ndatasets = load_dataset(\"csv\", data_files=dict(train=train_files, validation=validation_files))\r\n\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n)\r\n```\r\n\r\nWhat's happening is I can see that __getstate__ is called but not __setstate__, so the state of `tokenize_function` is invalid at the point that it's actually executed. This doesn't matter as far as I can see for the standard tokenizers as they don't use __getstate__ \/ __setstate__. 
I'm not sure if there's another hook I'm supposed to implement as well?\r\n\r\n---------------------------------------------------------------------------\r\nAssertionError Traceback (most recent call last)\r\n in \r\n 8 tokenized_datasets = datasets.map(\r\n 9 tokenize_function,\r\n---> 10 batched=True,\r\n 11 )\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in map(self, function, with_indices, input_columns, batched, batch_size, remove_columns, keep_in_memory, load_from_cache_file, cache_file_names, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, desc)\r\n 487 desc=desc,\r\n 488 )\r\n--> 489 for k, dataset in self.items()\r\n 490 }\r\n 491 )\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py in (.0)\r\n 487 desc=desc,\r\n 488 )\r\n--> 489 for k, dataset in self.items()\r\n 490 }\r\n 491 )\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 1633 fn_kwargs=fn_kwargs,\r\n 1634 new_fingerprint=new_fingerprint,\r\n-> 1635 desc=desc,\r\n 1636 )\r\n 1637 else:\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 184 }\r\n 185 # apply actual function\r\n--> 186 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 187 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 188 # re-apply format to the output\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 395 # Call actual function\r\n 396 \r\n--> 397 out = func(self, *args, **kwargs)\r\n 398 \r\n 399 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, desc)\r\n 1961 indices,\r\n 1962 check_same_num_examples=len(input_dataset.list_indexes()) > 0,\r\n-> 1963 offset=offset,\r\n 1964 )\r\n 1965 except NumExamplesMismatch:\r\n\r\n~\/.pyenv\/versions\/3.7.6\/envs\/xxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py in apply_function_on_filtered_inputs(inputs, indices, check_same_num_examples, offset)\r\n 1853 effective_indices = [i + offset for i in indices] if isinstance(indices, list) else indices + offset\r\n 1854 processed_inputs = (\r\n-> 1855 function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n 1856 )\r\n 1857 if update_data is None:\r\n\r\n in tokenize_function(examples)\r\n 1 def tokenize_function(examples):\r\n----> 2 assert tokenizer.state == \"restored\"\r\n 3 tokenizer(examples)\r\n 4 return examples\r\n\r\n\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2515\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515","id":924435447,"node_id":"MDExOlB1bGxSZXF1ZXN0NjczMDc3NTIx","number":2515,"title":"CRD3 dataset card","user":{"login":"wilsonyhlee","id":1937386,"node_id":"MDQ6VXNlcjE5MzczODY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1937386?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/wilsonyhlee","html_url":"https:\/\/github.com\/wilsonyhlee","followers_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/followers","following_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/orgs","repos_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/repos","events_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/wilsonyhlee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-18T00:24:07Z","updated_at":"2021-06-21T10:18:44Z","closed_at":"2021-06-21T10:18:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2515","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2515.patch"},"body":"This PR adds additional information to the CRD3 dataset card. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2514\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2514","id":924417172,"node_id":"MDU6SXNzdWU5MjQ0MTcxNzI=","number":2514,"title":"Can datasets remove duplicated rows?","user":{"login":"liuxinglan","id":16516583,"node_id":"MDQ6VXNlcjE2NTE2NTgz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16516583?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/liuxinglan","html_url":"https:\/\/github.com\/liuxinglan","followers_url":"https:\/\/api.github.com\/users\/liuxinglan\/followers","following_url":"https:\/\/api.github.com\/users\/liuxinglan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/liuxinglan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/liuxinglan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/liuxinglan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/liuxinglan\/orgs","repos_url":"https:\/\/api.github.com\/users\/liuxinglan\/repos","events_url":"https:\/\/api.github.com\/users\/liuxinglan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/liuxinglan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-17T23:35:38Z","updated_at":"2021-06-21T07:37:04Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\ni find myself more and more relying on datasets just to do all the preprocessing. 
One thing however, for removing duplicated rows, I couldn't find out how and am always converting datasets to pandas to do that..\r\n\r\n\r\n**Describe the solution you'd like**\r\nhave a functionality of \" remove duplicated rows\"\r\n\r\n**Describe alternatives you've considered**\r\nconvert dataset to pandas, remove duplicate, and convert back...\r\n\r\n\r\n**Additional context**\r\nno","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2513\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2513","id":924174413,"node_id":"MDU6SXNzdWU5MjQxNzQ0MTM=","number":2513,"title":"Corelation should be Correlation","user":{"login":"colbym-MM","id":71514164,"node_id":"MDQ6VXNlcjcxNTE0MTY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/71514164?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/colbym-MM","html_url":"https:\/\/github.com\/colbym-MM","followers_url":"https:\/\/api.github.com\/users\/colbym-MM\/followers","following_url":"https:\/\/api.github.com\/users\/colbym-MM\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/colbym-MM\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/colbym-MM\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/colbym-MM\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/colbym-MM\/orgs","repos_url":"https:\/\/api.github.com\/users\/colbym-MM\/repos","events_url":"https:\/\/api.github.com\/users\/colbym-MM\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/colbym-MM\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albe
rtvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-17T17:28:48Z","updated_at":"2021-06-18T08:43:55Z","closed_at":"2021-06-18T08:43:55Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/blob\/0e87e1d053220e8ecddfa679bcd89a4c7bc5af62\/metrics\/matthews_correlation\/matthews_correlation.py#L66","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2512\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2512","id":924069353,"node_id":"MDU6SXNzdWU5MjQwNjkzNTM=","number":2512,"title":"seqeval metric does not work with a recent version of sklearn: classification_report() got an unexpected keyword argument 'output_dict'","user":{"login":"avidale","id":8642136,"node_id":"MDQ6VXNlcjg2NDIxMzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8642136?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/avidale","html_url":"https:\/\/github.com\/avidale","followers_url":"https:\/\/api.github.com\/users\/avidale\/followers","following_url":"https:\/\/api.github.com\/users\/avidale\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/avidale\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/avidale\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/avidale\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/avidale\/orgs","repos_url":"https:\/\/api.github.com\/users\/avidale\/repos","events_url":"https:\/\/api.github.com\/users\/avidale\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/avidale\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-17T15:36:02Z","updated_at":"2021-06-17T15:46:07Z","closed_at":"2021-06-17T15:46:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset, load_metric\r\nseqeval = load_metric(\"seqeval\")\r\nseqeval.compute(predictions=[['A']], references=[['A']])\r\n```\r\n\r\n## Expected 
results\r\nThe function computes a dict with metrics\r\n\r\n## Actual results\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n 1 from datasets import load_dataset, load_metric\r\n 2 seqeval = load_metric(\"seqeval\")\r\n----> 3 seqeval.compute(predictions=[['A']], references=[['A']])\r\n\r\n~\/p3\/lib\/python3.7\/site-packages\/datasets\/metric.py in compute(self, *args, **kwargs)\r\n 396 references = self.data[\"references\"]\r\n 397 with temp_seed(self.seed):\r\n--> 398 output = self._compute(predictions=predictions, references=references, **kwargs)\r\n 399 \r\n 400 if self.buf_writer is not None:\r\n\r\n~\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/seqeval\/81eda1ff004361d4fa48754a446ec69bb7aa9cf4d14c7215f407d1475941c5ff\/seqeval.py in _compute(self, predictions, references, suffix)\r\n 95 \r\n 96 def _compute(self, predictions, references, suffix=False):\r\n---> 97 report = classification_report(y_true=references, y_pred=predictions, suffix=suffix, output_dict=True)\r\n 98 report.pop(\"macro avg\")\r\n 99 report.pop(\"weighted avg\")\r\n\r\nTypeError: classification_report() got an unexpected keyword argument 'output_dict'\r\n```\r\n\r\n## Environment info\r\nsklearn=0.24\r\ndatasets=1.1.3\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2511\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2511","id":923762133,"node_id":"MDU6SXNzdWU5MjM3NjIxMzM=","number":2511,"title":"Add C4","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-06-17T10:31:04Z","updated_at":"2021-07-05T12:36:58Z","closed_at":"2021-07-05T12:36:57Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** *C4*\r\n- **Description:** *https:\/\/github.com\/allenai\/allennlp\/discussions\/5056*\r\n- **Paper:** *https:\/\/arxiv.org\/abs\/1910.10683*\r\n- **Data:** *https:\/\/huggingface.co\/datasets\/allenai\/c4*\r\n- **Motivation:** *Used a lot for pretraining*\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nShould fix https:\/\/github.com\/huggingface\/datasets\/issues\/1710","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2510\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510","id":923735485,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcyNDY3MzY3","number":2510,"title":"Add align_labels_with_mapping to 
DatasetDict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-17T10:03:35Z","updated_at":"2021-06-17T10:45:25Z","closed_at":"2021-06-17T10:45:24Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2510","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2510.patch"},"body":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457 added the `Dataset.align_labels_with_mapping` method.\r\nIn this PR I also added `DatasetDict.align_labels_with_mapping`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2509\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509","id":922846035,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcxNjcyMzU5","number":2509,"title":"Fix fingerprint when moving cache 
dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-16T16:45:09Z","updated_at":"2021-06-21T15:05:04Z","closed_at":"2021-06-21T15:05:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2509","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2509.patch"},"body":"The fingerprint of a dataset changes if the cache directory is moved.\r\nI fixed that by setting the fingerprint to be the hash of:\r\n- the relative cache dir (dataset_name\/version\/config_id)\r\n- the requested split\r\n\r\nClose #2496 \r\n\r\nI had to fix an issue with the filelock filename that was too long (>255). It prevented the tests to run on my machine. I just added `hash_filename_if_too_long` in case this happens, to not get filenames longer than 255.\r\nWe usually have long filenames for filelocks because they are named after the path that is being locked. 
In case the path is a cache directory that has long directory names, then the filelock filename could en up being very long.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2508\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2508","id":921863173,"node_id":"MDU6SXNzdWU5MjE4NjMxNzM=","number":2508,"title":"Load Image Classification Dataset from Local ","user":{"login":"Jacobsolawetz","id":8428198,"node_id":"MDQ6VXNlcjg0MjgxOTg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8428198?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Jacobsolawetz","html_url":"https:\/\/github.com\/Jacobsolawetz","followers_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/followers","following_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/orgs","repos_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/repos","events_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Jacobsolawetz\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false},"assignees":[{"login":"nateraw","id":32437151,"node_id":"MDQ6VXNlcjMyNDM3MTUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32437151?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nateraw","html_url":"https:\/\/github.com\/nateraw","followers_url":"https:\/\/api.github.com\/users\/nateraw\/followers","following_url":"https:\/\/api.github.com\/users\/nateraw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nateraw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nateraw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nateraw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nateraw\/orgs","repos_url":"https:\/\/api.github.com\/users\/nateraw\/repos","events_url":"https:\/\/api.github.com\/users\/nateraw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nateraw\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-06-15T22:43:33Z","updated_at":"2021-07-12T18:03:54Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? 
Please describe.**\r\nYes - we would like to load an image classification dataset with datasets without having to write a custom data loader.\r\n\r\n**Describe the solution you'd like**\r\n\r\nGiven a folder structure with images of each class in each folder, the ability to load these folders into a HuggingFace dataset like \"cifar10\".\r\n\r\n**Describe alternatives you've considered**\r\n\r\nImplement ViT training outside of the HuggingFace Trainer and without datasets (we did this but prefer to stay on the main path)\r\n\r\nWrite custom data loader logic\r\n\r\n**Additional context**\r\n\r\nWe're training ViT on custom dataset\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2507\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507","id":921441962,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcwNDQ0MDgz","number":2507,"title":"Rearrange JSON field names to match passed features schema field names","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-15T14:10:02Z","updated_at":"2021-06-16T10:47:49Z","closed_at":"2021-06-16T10:47:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2507","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2507.patch"},"body":"This PR depends on PR #2453 (which must be merged first).\r\n\r\nClose #2366.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2506\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506","id":921435598,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcwNDM4NTgx","number":2506,"title":"Add course 
banner","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-15T14:03:54Z","updated_at":"2021-06-15T16:25:36Z","closed_at":"2021-06-15T16:25:35Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2506","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2506.patch"},"body":"This PR adds a course banner similar to the one you can now see in the [Transformers repo](https:\/\/github.com\/huggingface\/transformers) that links to the course. Let me know if placement seems right to you or not, I can move it just below the badges too.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2505\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505","id":921234797,"node_id":"MDExOlB1bGxSZXF1ZXN0NjcwMjY2NjQy","number":2505,"title":"Make numpy arrow extractor 
faster","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-06-15T10:11:32Z","updated_at":"2021-06-28T09:53:39Z","closed_at":"2021-06-28T09:53:38Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2505","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2505.patch"},"body":"I changed the NumpyArrowExtractor to call directly to_numpy and see if it can lead to speed-ups as discussed in https:\/\/github.com\/huggingface\/datasets\/issues\/2498\r\n\r\nThis could make the numpy\/torch\/tf\/jax formatting faster","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2503\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2503","id":920636186,"node_id":"MDU6SXNzdWU5MjA2MzYxODY=","number":2503,"title":"SubjQA wrong boolean values in 
entries","user":{"login":"arnaudstiegler","id":26485052,"node_id":"MDQ6VXNlcjI2NDg1MDUy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26485052?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/arnaudstiegler","html_url":"https:\/\/github.com\/arnaudstiegler","followers_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/followers","following_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/orgs","repos_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/repos","events_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/arnaudstiegler\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-06-14T17:42:46Z","updated_at":"2021-06-15T08:41:00Z","closed_at":
null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nSubjQA seems to have a boolean that's consistently wrong.\r\n\r\nIt defines:\r\n- question_subj_level: The subjectiviy level of the question (on a 1 to 5 scale with 1 being the most subjective).\r\n- is_ques_subjective: A boolean subjectivity label derived from question_subj_level (i.e., scores below 4 are considered as subjective)\r\n\r\nHowever, `is_ques_subjective` seems to have wrong values in the entire dataset.\r\n\r\nFor instance, in the example in the dataset card, we have:\r\n- \"question_subj_level\": 2\r\n- \"is_ques_subjective\": false\r\n\r\nHowever, according to the description, the question should be subjective since the `question_subj_level` is below 4\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2502\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502","id":920623572,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NzQ1MDA5","number":2502,"title":"JAX integration","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-14T17:24:23Z","updated_at":"2021-06-21T16:15:50Z","closed_at":"2021-06-21T16:15:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2502","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2502.patch"},"body":"Hi !\r\n\r\nI just added the \"jax\" formatting, as we already have for pytorch, tensorflow, numpy (and also pandas and arrow).\r\nIt does pretty much the same thing as the pytorch formatter except it creates jax.numpy.ndarray objects.\r\n\r\n```python\r\nfrom datasets import Dataset\r\n\r\nd = Dataset.from_dict({\"foo\": [[0., 1., 2.]]})\r\nd = d.with_format(\"jax\")\r\nd[0]\r\n# {'foo': DeviceArray([0., 1., 2.], dtype=float32)}\r\n```\r\n\r\nA few details:\r\n- The default integer precision for jax depends on the jax configuration `jax_enable_x64` (see 
[here](https:\/\/jax.readthedocs.io\/en\/latest\/notebooks\/Common_Gotchas_in_JAX.html#double-64bit-precision)), I took that into account. Unless `jax_enable_x64` is specified, it is int32 by default\r\n- AFAIK it's not possible to do a full conversion from arrow data to jax data. We are doing arrow -> numpy -> jax but the numpy -> jax part doesn't do zero copy unfortutanely (see [here](https:\/\/github.com\/google\/jax\/issues\/4486))\r\n- the env var for disabling JAX is `USE_JAX`. However I noticed that in `transformers` it is `USE_FLAX`. This is not an issue though IMO\r\n\r\nI also updated `convert_to_python_objects` to allow users to pass jax.numpy.ndarray objects to build a dataset.\r\n\r\nSince the `convert_to_python_objects` method became slow because it's the time when pytorch, tf (and now jax) are imported, I fixed it by checking the `sys.modules` to avoid unecessary import of pytorch, tf or jax.\r\n\r\nClose #2495","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2501\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501","id":920579634,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NzA3Nzc0","number":2501,"title":"Add Zenodo metadata file with license","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-14T16:28:12Z","updated_at":"2021-06-14T16:49:42Z","closed_at":"2021-06-14T16:49:42Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2501","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2501.patch"},"body":"This Zenodo metadata file fixes the name of the `Datasets` license appearing in the DOI as `\"Apache-2.0\"`, which otherwise by default is `\"other-open\"`.\r\n\r\nClose #2472. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2500\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500","id":920471411,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NjE2MjQ1","number":2500,"title":"Add load_dataset_builder","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-06-14T14:27:45Z","updated_at":"2021-07-09T00:08:16Z","closed_at":"2021-07-05T10:45:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2500","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2500.patch"},"body":"Adds the `load_dataset_builder` function. 
The good thing is that we can reuse this function to load the dataset info without downloading the dataset itself.\r\n\r\nTODOs:\r\n- [x] Add docstring and entry in the docs\r\n- [x] Add tests\r\n\r\nCloses #2484 \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2499\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2499","id":920413021,"node_id":"MDU6SXNzdWU5MjA0MTMwMjE=","number":2499,"title":" Python Programming Puzzles","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-14T13:27:18Z","updated_at":"2021-06-15T18:14:14Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Python Programming Puzzles\r\n- **Description:** Programming challenge called programming puzzles, as an objective and comprehensive evaluation of program synthesis\r\n- **Paper:** https:\/\/arxiv.org\/pdf\/2106.05784.pdf\r\n- **Data:** https:\/\/github.com\/microsoft\/PythonProgrammingPuzzles ([Scrolling through the data](https:\/\/github.com\/microsoft\/PythonProgrammingPuzzles\/blob\/main\/problems\/README.md))\r\n- **Motivation:** Spans a large range of difficulty, problems, and domains. 
A useful resource for evaluation as we don't have a clear understanding of the abilities and skills of extremely large LMs.\r\n\r\nNote: it's a growing dataset (contributions are welcome), so we'll need careful versioning for this dataset.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2498\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2498","id":920411285,"node_id":"MDU6SXNzdWU5MjA0MTEyODU=","number":2498,"title":"Improve torch formatting performance","user":{"login":"vblagoje","id":458335,"node_id":"MDQ6VXNlcjQ1ODMzNQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/458335?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vblagoje","html_url":"https:\/\/github.com\/vblagoje","followers_url":"https:\/\/api.github.com\/users\/vblagoje\/followers","following_url":"https:\/\/api.github.com\/users\/vblagoje\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vblagoje\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vblagoje\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vblagoje\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vblagoje\/orgs","repos_url":"https:\/\/api.github.com\/users\/vblagoje\/repos","events_url":"https:\/\/api.github.com\/users\/vblagoje\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vblagoje\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-06-14T13:25:24Z","updated_at":"2021-06-21T09:54:54Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nIt would be great, if possible, to further improve read performance of raw encoded datasets and their subsequent conversion to torch tensors. \r\n\r\nA bit more background. I am working on LM pre-training using HF ecosystem. We use encoded HF Wikipedia and BookCorpus datasets. The training machines are similar to DGX-1 workstations. We use HF trainer torch.distributed training approach on a single machine with 8 GPUs.\r\n\r\nThe current performance is about 30% slower than NVidia optimized BERT [examples](https:\/\/github.com\/NVIDIA\/DeepLearningExamples\/tree\/master\/PyTorch\/LanguageModeling) baseline. Quite a bit of customized code and training loop tricks were used to achieve the baseline performance. It would be great to achieve the same performance while using nothing more than off the shelf HF ecosystem. Perhaps, in the future, with @stas00 work on deepspeed integration, it could even be exceeded. 
\r\n\r\n**Describe the solution you'd like**\r\nUsing profiling tools we've observed that appx. 25% of cumulative run time is spent on data loader next call.\r\n![dataloader_next](https:\/\/user-images.githubusercontent.com\/458335\/121895543-59742a00-ccee-11eb-85fb-f07715e3f1f6.png)\r\n\r\nAs you can observe most of the data loader next call is spent in HF datasets torch_formatter.py format_batch call. \r\n\r\nDigging a bit deeper into format_batch we can see the following profiler data:\r\n![torch_formatter](https:\/\/user-images.githubusercontent.com\/458335\/121895944-c7b8ec80-ccee-11eb-95d5-5875c5716c30.png)\r\n\r\nOnce again, a lot of time is spent in pyarrow table conversion to pandas which seems like an intermediary step. Offline @lhoestq told me that this approach was, for some unknown reason, faster than direct to numpy conversion. \r\n\r\n**Describe alternatives you've considered**\r\nI am not familiar with pyarrow and have not yet considered the alternatives to the current approach. \r\n\r\nMost of the online advice around data loader performance improvements revolve around increasing number of workers, using pin memory for copying tensors from host device to gpus but we've already tried these avenues without much performance improvement. Weights & Biases dashboard for the pre-training task reports CPU utilization of ~ 10%, GPUs are completely saturated (GPU utilization is above 95% on all GPUs), while disk utilization is above 90%. \r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2497\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497","id":920250382,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5NDI3OTU3","number":2497,"title":"Use default cast for sliced list arrays if pyarrow >= 
4","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":2,"created_at":"2021-06-14T10:02:47Z","updated_at":"2021-06-15T18:06:18Z","closed_at":"2021-06-14T14:24:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2497","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2497.patch"},"body":"From pyarrow version 4, it is supported to cast sliced lists.\r\n\r\nThis PR uses default pyarrow cast in Datasets to cast sliced list arrays if pyarrow version is >= 4.\r\n\r\nIn relation with PR #2461 and #2490.\r\n\r\ncc: @lhoestq, @abhi1thakur, @SBrandeis","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2496\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2496","id":920216314,"node_id":"MDU6SXNzdWU5MjAyMTYzMTQ=","number":2496,"title":"Dataset fingerprint changes after moving the cache directory, which prevent cache reload when using `map`","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/
\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-14T09:20:26Z","updated_at":"2021-06-21T15:05:03Z","closed_at":"2021-06-21T15:05:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"`Dataset.map` uses the dataset fingerprint (a hash) for caching.\r\nHowever the fingerprint seems to change when someone moves the cache directory of the dataset.\r\n\r\nThis is because it uses the default fingerprint generation:\r\n1. the dataset path is used to get the fingerprint\r\n2. the modification times of the arrow file is also used to get the fingerprint\r\n\r\nTo fix that we could set the fingerprint of the dataset to be a hash of (, , , ), i.e. a hash of the the cache path relative to the cache directory.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2495\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2495","id":920170030,"node_id":"MDU6SXNzdWU5MjAxNzAwMzA=","number":2495,"title":"JAX formatting","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.g
ithubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-14T08:32:07Z","updated_at":"2021-06-21T16:15:49Z","closed_at":"2021-06-21T16:15:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"We already support pytorch, tensorflow, numpy, pandas and arrow dataset formatting. Let's add jax as well","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2494\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2494","id":920149183,"node_id":"MDU6SXNzdWU5MjAxNDkxODM=","number":2494,"title":"Improve docs on Enhancing performance","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-14T08:11:48Z","updated_at":"2021-06-14T08:11:48Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"In the [\"Enhancing 
performance\"](https:\/\/huggingface.co\/docs\/datasets\/loading_datasets.html#enhancing-performance) section of docs, add specific use cases:\r\n- How to make datasets the fastest\r\n- How to make datasets take the less RAM\r\n- How to make datasets take the less hard drive mem\r\n\r\ncc: @thomwolf \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2493\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493","id":919833281,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY5MDc4OTcw","number":2493,"title":"add tensorflow-macos support","user":{"login":"slayerjain","id":12831254,"node_id":"MDQ6VXNlcjEyODMxMjU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/12831254?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/slayerjain","html_url":"https:\/\/github.com\/slayerjain","followers_url":"https:\/\/api.github.com\/users\/slayerjain\/followers","following_url":"https:\/\/api.github.com\/users\/slayerjain\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/slayerjain\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/slayerjain\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/slayerjain\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/slayerjain\/orgs","repos_url":"https:\/\/api.github.com\/users\/slayerjain\/repos","events_url":"https:\/\/api.github.com\/users\/slayerjain\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/slayerjain\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-13T16:20:08Z","updated_at":"2021-06-15T08:53:06Z","closed_at":"2021-06-15T08:53:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2493","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2493.patch"},"body":"ref - https:\/\/github.com\/huggingface\/datasets\/issues\/2068","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2492\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492","id":919718102,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4OTkxODk4","number":2492,"title":"Eduge","user":{"login":"enod","id":6023883,"node_id":"MDQ6VXNlcjYwMjM4ODM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6023883?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/enod","html_url":"https:\/\/github.com\/enod","followers_url":"https:\/\/api.github.com\/users\/enod\/followers","following_url":"https:\/\/api.github.com\/users\/enod\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/enod\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/enod\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/enod\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/enod\/orgs","repos_url":"https:\/\/api.github.com\/users\/enod\/repos","events_url":"https:\/\/api.github.com\/users\/enod\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/enod\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-13T05:10:59Z","updated_at":"2021-06-22T09:49:04Z","closed_at":"2021-06-16T10:41:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2492","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2492.patch"},"body":"Hi, awesome folks behind the huggingface! \r\n\r\nHere is my PR for the text classification dataset in Mongolian.\r\n\r\nPlease do let me know in case you have anything to clarify. 
\r\n\r\nThanks & Regards,\r\nEnod","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2491\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491","id":919714506,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4OTg5MTUw","number":2491,"title":"add eduge classification dataset","user":{"login":"enod","id":6023883,"node_id":"MDQ6VXNlcjYwMjM4ODM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6023883?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/enod","html_url":"https:\/\/github.com\/enod","followers_url":"https:\/\/api.github.com\/users\/enod\/followers","following_url":"https:\/\/api.github.com\/users\/enod\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/enod\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/enod\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/enod\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/enod\/orgs","repos_url":"https:\/\/api.github.com\/users\/enod\/repos","events_url":"https:\/\/api.github.com\/users\/enod\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/enod\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-13T04:37:01Z","updated_at":"2021-06-13T05:06:48Z","closed_at":"2021-06-13T05:06:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2491","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2491.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2490\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490","id":919571385,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4ODc4NDA3","number":2490,"title":"Allow latest pyarrow 
version","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":1,"created_at":"2021-06-12T14:17:34Z","updated_at":"2021-07-06T16:54:52Z","closed_at":"2021-06-14T07:53:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2490","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2490.patch"},"body":"Allow latest pyarrow version, once that version 4.0.1 fixes the segfault bug introduced in version 4.0.0.\r\n\r\nClose #2489.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2489\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2489","id":919569749,"node_id":"MDU6SXNzdWU5MTk1Njk3NDk=","number":2489,"title":"Allow latest pyarrow version once segfault bug is fixed","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-12T14:09:52Z","updated_at":"2021-06-14T07:53:23Z","closed_at":"2021-06-14T07:53:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As pointed out by @symeneses (see https:\/\/github.com\/huggingface\/datasets\/pull\/2268#issuecomment-860048613), pyarrow has fixed the segfault bug present in version 4.0.0 (see https:\/\/issues.apache.org\/jira\/browse\/ARROW-12568):\r\n- it was fixed on 3 May 2021\r\n- version 4.0.1 was released on 19 May 2021 with the bug fix","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2488\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488","id":919500756,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4ODIwNDA1","number":2488,"title":"Set configurable downloaded datasets 
path","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-12T09:09:03Z","updated_at":"2021-06-14T09:13:27Z","closed_at":"2021-06-14T08:29:07Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2488","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2488.patch"},"body":"Part of #2480.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2487\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487","id":919452407,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4Nzc5Mjk0","number":2487,"title":"Set configurable extracted datasets path","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":2,"created_at":"2021-06-12T05:47:29Z","updated_at":"2021-06-14T09:30:17Z","closed_at":"2021-06-14T09:02:56Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2487","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2487.patch"},"body":"Part of #2480.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2486\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486","id":919174898,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4NTI2Njg3","number":2486,"title":"Add Rico 
Dataset","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-11T20:17:41Z","updated_at":"2021-08-01T20:44:28Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2486","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2486.patch"},"body":"Hi there!\r\n\r\nI'm wanting to add the Rico datasets for software engineering type data to y'alls awesome library. However, as I have started coding, I've ran into a few hiccups so I thought it best to open the PR early to get a bit of discussion on how the Rico datasets should be added to the `datasets` lib.\r\n\r\n1) There are 7 different datasets under Rico and so I was wondering, should I make a folder for each or should I put them as different configurations of a single dataset?\r\nYou can see the datasets available for Rico here: http:\/\/interactionmining.org\/rico\r\n\r\n2) As of right now, I have a semi working version of the first dataset which has pairs of screenshots and hierarchies from android applications. However, these screenshots are very large (1440, 2560, 3) and there are 66,000 of them so I am not able to perform the processing that the `datasets` lib does after downloading and extracting the dataset since I run out of memory very fast. Is there a way to have `datasets` lib not put everything into memory while it is processing the dataset?\r\n\r\n2.1) If there is not a way, would it be better to just return the path to the screenshots instead of the actual image?\r\n\r\n3) The hierarchies are JSON objects and looking through the documentation of `datasets`, I didn't see any feature that I could use for this type of data. 
So, for now I just have it being read in as a string, is this okay or should I be doing it differently?\r\n\r\n4) One of the Rico datasets is a bunch of animations (GIFs), is there a `datasets` feature that I can put this type of data into or should I just return the path as a string?\r\n\r\nI appreciate any and all help I can get for this PR, I think the Rico datasets will be an awesome addition to the library :nerd_face: !","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2485\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2485","id":919099218,"node_id":"MDU6SXNzdWU5MTkwOTkyMTg=","number":2485,"title":"Implement layered building","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-11T18:54:25Z","updated_at":"2021-06-11T18:54:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq (see also here https:\/\/github.com\/huggingface\/datasets\/issues\/2481#issuecomment-859712190):\r\n\r\n> My suggestion for this would be to have this enabled by default.\r\n> \r\n> Plus I don't know if there should be a dedicated issue to that is another functionality. But I propose layered building rather than all at once. That is:\r\n>\r\n> 1. uncompress a handful of files via a generator enough to generate one arrow file\r\n> 2. process arrow file 1\r\n> 3. delete all the files that went in and aren't needed anymore.\r\n>\r\n> rinse and repeat.\r\n> \r\n> 1. This way much less disc space will be required - e.g. on JZ we won't be running into inode limitation, also it'd help with the collaborative hub training project\r\n> 2. The user doesn't need to go and manually clean up all the huge files that were left after pre-processing\r\n> 3. 
It would already include deleting temp files this issue is talking about\r\n> \r\n> I wonder if the new streaming API would be of help, except here the streaming would be into arrow files as the destination, rather than dataloaders.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2484\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2484","id":919092635,"node_id":"MDU6SXNzdWU5MTkwOTI2MzU=","number":2484,"title":"Implement loading a dataset builder","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"assignees":[{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-11T18:47:22Z","updated_at":"2021-07-05T10:45:57Z","closed_at":"2021-07-05T10:45:57Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq, this would allow things like:\r\n\r\n```python\r\nfrom datasets import load_dataset_builder\r\ndataset_name = \"openwebtext\"\r\nbuilder = load_dataset_builder(dataset_name)\r\nprint(builder.cache_dir)\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2483\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483","id":918871712,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4MjU1Mjg1","number":2483,"title":"Use gc.collect only when needed to avoid slow 
downs","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-11T15:09:30Z","updated_at":"2021-06-18T19:25:06Z","closed_at":"2021-06-11T15:31:36Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2483","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2483.patch"},"body":"In https:\/\/github.com\/huggingface\/datasets\/commit\/42320a110d9d072703814e1f630a0d90d626a1e6 we added a call to gc.collect to resolve some issues on windows (see https:\/\/github.com\/huggingface\/datasets\/pull\/2482)\r\n\r\nHowever calling gc.collect too often causes significant slow downs (the CI run time doubled).\r\nSo I just moved the gc.collect call to the exact place where it's actually needed: when post-processing a dataset","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2482\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482","id":918846027,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4MjMyMzI5","number":2482,"title":"Allow to use 
tqdm>=4.50.0","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-11T14:49:21Z","updated_at":"2021-06-11T15:11:51Z","closed_at":"2021-06-11T15:11:50Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2482","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2482.patch"},"body":"We used to have permission errors on windows whith the latest versions of tqdm (see [here](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/6365\/workflows\/24f7c960-3176-43a5-9652-7830a23a981e\/jobs\/39232))\r\n\r\nThey were due to open arrow files not properly closed by pyarrow.\r\nSince https:\/\/github.com\/huggingface\/datasets\/commit\/42320a110d9d072703814e1f630a0d90d626a1e6 gc.collect is called each time we don't need an arrow file to make sure that the files are closed.\r\n\r\nclose https:\/\/github.com\/huggingface\/datasets\/issues\/2471\r\n\r\ncc @lewtun ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2481\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2481","id":918680168,"node_id":"MDU6SXNzdWU5MTg2ODAxNjg=","number":2481,"title":"Delete extracted files to save disk 
space","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6
","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":1,"created_at":"2021-06-11T12:21:52Z","updated_at":"2021-07-19T09:08:18Z","closed_at":"2021-07-19T09:08:18Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq, allowing the deletion of extracted files would save a great amount of disk space to typical user.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2480\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2480","id":918678578,"node_id":"MDU6SXNzdWU5MTg2Nzg1Nzg=","number":2480,"title":"Set download\/extracted paths 
configurable","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":1,"created_at":"2021-06-11T12:20:24Z","updated_at":"2021
-06-15T14:23:49Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As discussed with @stas00 and @lhoestq, setting these paths configurable may allow to overcome disk space limitation on different partitions\/drives.\r\n\r\nTODO:\r\n- [x] Set configurable extracted datasets path: #2487\r\n- [x] Set configurable downloaded datasets path: #2488\r\n- [ ] Set configurable \"incomplete\" datasets path?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2479\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479","id":918672431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY4MDc3NTI4","number":2479,"title":"\u274c load_datasets \u274c","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-11T12:14:36Z","updated_at":"2021-06-11T14:46:25Z","closed_at":"2021-06-11T14:46:25Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2479","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2479.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2478\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2478","id":918507510,"node_id":"MDU6SXNzdWU5MTg1MDc1MTA=","number":2478,"title":"Create release 
script","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-06-11T09:38:02Z","updated_at":"2021-06-11
T09:38:02Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Create a script so that releases can be done automatically (as done in `transformers`).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2477\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477","id":918334431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY3NzczMTY0","number":2477,"title":"Fix docs custom stable version","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":4,"created_at":"2021-06-11T07:26:03Z","updated_at":"2021-06-14T09:14:20Z","closed_at":"2021-06-14T08:20:18Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2477","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2477.patch"},"body":"Currently docs default version is 1.5.0. 
This PR fixes this and sets the latest version instead.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2476\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476","id":917686662,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY3MTg3OTk1","number":2476,"title":"Add TimeDial","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-10T18:33:07Z","updated_at":"2021-07-30T12:57:54Z","closed_at":"2021-07-30T12:57:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2476","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2476.patch"},"body":"Dataset: https:\/\/github.com\/google-research-datasets\/TimeDial\r\n\r\nTo-Do: Update README.md and add YAML tags","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2475\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2475","id":917650882,"node_id":"MDU6SXNzdWU5MTc2NTA4ODI=","number":2475,"title":"Issue in timit_asr 
database","user":{"login":"hrahamim","id":85702107,"node_id":"MDQ6VXNlcjg1NzAyMTA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/85702107?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hrahamim","html_url":"https:\/\/github.com\/hrahamim","followers_url":"https:\/\/api.github.com\/users\/hrahamim\/followers","following_url":"https:\/\/api.github.com\/users\/hrahamim\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hrahamim\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hrahamim\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hrahamim\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hrahamim\/orgs","repos_url":"https:\/\/api.github.com\/users\/hrahamim\/repos","events_url":"https:\/\/api.github.com\/users\/hrahamim\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hrahamim\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-10T18:05:29Z","updated_at":"2021-06-13T08:13:50Z","closed_at":"2021-06-13T08:13:13Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI am trying to load the timit_asr dataset however only the first record is shown (duplicated over all the rows).\r\nI am using the next code line\r\ndataset = load_dataset(\u201ctimit_asr\u201d, split=\u201ctest\u201d).shuffle().select(range(10))\r\n\r\nThe above code result with the same sentence duplicated ten times.\r\nIt also happens when I use the dataset viewer at Streamlit .\r\n\r\n## Steps to reproduce the bug\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\u201ctimit_asr\u201d, split=\u201ctest\u201d).shuffle().select(range(10))\r\ndata = dataset.to_pandas()\r\n\r\n# Sample code to reproduce the bug\r\n```\r\n\r\n## Expected results\r\ntable with different row information\r\n\r\n## Actual results\r\nSpecify the actual results or traceback.\r\n\r\n## Environment info\r\n\r\n\r\n- `datasets` version: 1.4.1 (also occur in the latest version)\r\n- Platform: Linux-4.15.0-143-generic-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.6.9\r\n- PyTorch version (GPU?): 1.8.1+cu102 (False)\r\n- Tensorflow version (GPU?): 1.15.3 (False)\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n\r\n- `datasets` version:\r\n- Platform:\r\n- Python version:\r\n- PyArrow version:\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2474\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2474","id":917622055,"node_id":"MDU6SXNzdWU5MTc2MjIwNTU=","number":2474,"title":"cache_dir parameter for load_from_disk 
?","user":{"login":"TaskManager91","id":7063207,"node_id":"MDQ6VXNlcjcwNjMyMDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7063207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TaskManager91","html_url":"https:\/\/github.com\/TaskManager91","followers_url":"https:\/\/api.github.com\/users\/TaskManager91\/followers","following_url":"https:\/\/api.github.com\/users\/TaskManager91\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TaskManager91\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TaskManager91\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TaskManager91\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TaskManager91\/orgs","repos_url":"https:\/\/api.github.com\/users\/TaskManager91\/repos","events_url":"https:\/\/api.github.com\/users\/TaskManager91\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TaskManager91\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-06-10T17:39:36Z","updated_at":"2021-06-14T08:41:09Z","closed_at":null,
"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nWhen using Google Colab big datasets can be an issue, as they won't fit on the VM's disk. Therefore mounting google drive could be a possible solution. Unfortunatly when loading my own dataset by using the _load_from_disk_ function, the data gets cached to the VM's disk:\r\n\r\n`\r\nfrom datasets import load_from_disk\r\n\r\nmyPreprocessedData = load_from_disk(\"\/content\/gdrive\/MyDrive\/ASR_data\/myPreprocessedData\")\r\n\r\n`\r\nI know that chaching on google drive could slow down learning. But at least it would run.\r\n\r\n**Describe the solution you'd like**\r\nAdd cache_Dir parameter to the load_from_disk function.\r\n\r\n**Describe alternatives you've considered**\r\nIt looks like you could write a custom loading script for the load_dataset function. But this seems to be much too complex for my use case. Is there perhaps a template here that uses the load_from_disk function?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2473\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473","id":917538629,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY3MDU5MjI5","number":2473,"title":"Add Disfl-QA","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-10T16:18:00Z","updated_at":"2021-07-29T11:56:19Z","closed_at":"2021-07-29T11:56:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2473","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2473.patch"},"body":"Dataset: https:\/\/github.com\/google-research-datasets\/disfl-qa\r\n\r\nTo-Do: Update README.md and add YAML tags","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2472\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2472","id":917463821,"node_id":"MDU6SXNzdWU5MTc0NjM4MjE=","number":2472,"title":"Fix automatic generation of Zenodo DOI","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":4,"created_at":"2021-06-10T15:15:46Z","updated_at":"2021-06-14T16:49:42Z","closed_at":"2021-06-14T16:49:42Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"After the last release of Datasets (1.8.0), the automatic generation of the Zenodo DOI failed: it appears in yellow as \"Received\", instead of in green as \"Published\".\r\n\r\nI have contacted Zenodo support to fix this issue.\r\n\r\nTODO:\r\n- [x] Check with Zenodo to fix the issue\r\n- [x] Check BibTeX entry is right","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2471\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2471","id":917067165,"node_id":"MDU6SXNzdWU5MTcwNjcxNjU=","number":2471,"title":"Fix PermissionError on Windows when using tqdm 
>=4.50.0","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-06-10T08:31:49Z","updated_at":"2021-06-11T15:11:50Z","closed_at":"2021-06-11T15:11:50Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"See: https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/235\/workflows\/cfb6a39f-68eb-4802-8b17-2cd5e8ea7369\/jobs\/1111\r\n\r\n```\r\nPermissionError: [WinError 32] The process cannot access the file because it is being used by another process\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2470\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2470","id":916724260,"node_id":"MDU6SXNzdWU5MTY3MjQyNjA=","number":2470,"title":"Crash when `num_proc` > dataset length for `map()` on a `datasets.Dataset`.","user":{"login":"mbforbes","id":1170062,"node_id":"MDQ6VXNlcjExNzAwNjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1170062?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mbforbes","html_url":"https:\/\/github.com\/mbforbes","followers_url":"https:\/\/api.github.com\/users\/mbforbes\/followers","following_url":"https:\/\/api.github.com\/users\/mbforbes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mbforbes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mbforbes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mbforbes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mbforbes\/orgs","repos_url":"https:\/\/api.github.com\/users\/mbforbes\/repos","events_url":"https:\/\/api.github.com\/users\/mbforbes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mbforbes\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-06-09T22:40:22Z","updated_at":"2021-07-01T09:34:54Z","closed_at":"2021-07-01T09:11:13Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nCrash if when using `num_proc` > 1 (I used 16) for `map()` on a `datasets.Dataset`.\r\n\r\nI believe I've had cases where `num_proc` > 1 works before, but now it seems either inconsistent, or depends on my data. I'm not sure whether the issue is on my end, because it's difficult for me to debug! 
Any tips greatly appreciated, I'm happy to provide more info if it would helps us diagnose.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n# this function will be applied with map()\r\ndef tokenize_function(examples):\r\n return tokenizer(\r\n examples[\"text\"],\r\n padding=PaddingStrategy.DO_NOT_PAD,\r\n truncation=True,\r\n )\r\n\r\n# data_files is a Dict[str, str] mapping name -> path\r\ndatasets = load_dataset(\"text\", data_files={...}) \r\n\r\n# this is where the error happens if num_proc = 16,\r\n# but is fine if num_proc = 1\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=num_workers,\r\n)\r\n```\r\n\r\n## Expected results\r\nThe `map()` function succeeds with `num_proc` > 1.\r\n\r\n## Actual results\r\n![image](https:\/\/user-images.githubusercontent.com\/1170062\/121404271-a6cc5200-c910-11eb-8e27-5c893bd04042.png)\r\n![image](https:\/\/user-images.githubusercontent.com\/1170062\/121404362-be0b3f80-c910-11eb-9117-658943029aef.png)\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.4.0-73-generic-x86_64-with-glibc2.31\r\n- Python version: 3.9.5\r\n- PyTorch version (GPU?): 1.8.1+cu111 (True)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: Yes, but I think N\/A for this issue\r\n- Using distributed or parallel set-up in script?: Multi-GPU on one machine, but I think also N\/A for this issue\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2469\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469","id":916440418,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY2MTA1OTk1","number":2469,"title":"Bump tqdm 
version","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-09T17:24:40Z","updated_at":"2021-06-11T15:03:42Z","closed_at":"2021-06-11T15:03:36Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2469","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2469.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2468\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468","id":916427320,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY2MDk0ODI5","number":2468,"title":"Implement ClassLabel encoding in JSON 
loader","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":1,"created_at":"2021-06-09T17:08:54Z","updated_at":"2021-06-28T15:39:54Z","closed_at":"2021-06-28T15:05:35Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2468","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2468.patch"},"body":"Close #2365.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2466\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466","id":915914098,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1NjY1MjQy","number":2466,"title":"change udpos features structure","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-09T08:03:31Z","updated_at":"2021-06-18T11:55:09Z","closed_at":"2021-06-16T10:41:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2466","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2466.patch"},"body":"The structure is change such that each example is a sentence\r\nThe change is done for issues:\r\n#2061 \r\n#2444 \r\n\r\nClose #2061 , close #2444.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2465\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465","id":915525071,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MzMxMDMz","number":2465,"title":"adding masahaner 
dataset","user":{"login":"dadelani","id":23586676,"node_id":"MDQ6VXNlcjIzNTg2Njc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23586676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dadelani","html_url":"https:\/\/github.com\/dadelani","followers_url":"https:\/\/api.github.com\/users\/dadelani\/followers","following_url":"https:\/\/api.github.com\/users\/dadelani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dadelani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dadelani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dadelani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dadelani\/orgs","repos_url":"https:\/\/api.github.com\/users\/dadelani\/repos","events_url":"https:\/\/api.github.com\/users\/dadelani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dadelani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-08T21:20:25Z","updated_at":"2021-06-14T14:59:05Z","closed_at":"2021-06-14T14:59:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2465","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2465.patch"},"body":"Adding Masakhane dataset https:\/\/github.com\/masakhane-io\/masakhane-ner \r\n\r\n@lhoestq , can you please review","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2464\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464","id":915485601,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1Mjk1MDE5","number":2464,"title":"fix: adjusting indexing for the 
labels.","user":{"login":"drugilsberg","id":5406908,"node_id":"MDQ6VXNlcjU0MDY5MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5406908?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/drugilsberg","html_url":"https:\/\/github.com\/drugilsberg","followers_url":"https:\/\/api.github.com\/users\/drugilsberg\/followers","following_url":"https:\/\/api.github.com\/users\/drugilsberg\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/drugilsberg\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/drugilsberg\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/drugilsberg\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/drugilsberg\/orgs","repos_url":"https:\/\/api.github.com\/users\/drugilsberg\/repos","events_url":"https:\/\/api.github.com\/users\/drugilsberg\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/drugilsberg\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-08T20:47:25Z","updated_at":"2021-06-09T10:15:46Z","closed_at":"2021-06-09T09:10:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2464","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2464.patch"},"body":"The labels index were mismatching the actual ones used in the dataset. Specifically `0` is used for `SUPPORTS` and `1` is used for `REFUTES`\r\nAfter this change, the `README.md` now reflects the content of `dataset_infos.json`.\r\n\r\nSigned-off-by: Matteo Manica ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2463\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463","id":915454788,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MjY3NTA2","number":2463,"title":"Fix proto_qa download 
link","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-08T20:23:16Z","updated_at":"2021-06-10T12:49:56Z","closed_at":"2021-06-10T08:31:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2463","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2463.patch"},"body":"Fixes #2459 \r\n\r\nInstead of updating the path, this PR fixes a commit hash as suggested by @lhoestq.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2462\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2462","id":915384613,"node_id":"MDU6SXNzdWU5MTUzODQ2MTM=","number":2462,"title":"Merge DatasetDict and 
Dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":5,"closed_issues":2,"state":"open","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-08-04T17:03:52Z","due_on":"2021-08-21T07:00:00Z","closed_at":null},"comments":0,"created_at":"2021-06-08T19:22:04Z","updated_at":"2021-07-21T15:36:06Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As discussed in #2424 and #2437 (please see there for detailed conversation):\r\n- It would be desirable to improve UX with respect the 
confusion between DatasetDict and Dataset.\r\n- The difference between Dataset and DatasetDict is an additional abstraction complexity that confuses \"typical\" end users. \r\n- A user expects a \"Dataset\" (whatever it contains multiple or a single split) and maybe it could be interesting to try to simplify the user-facing API as much as possible to hide this complexity from the end user.\r\n\r\nHere is a proposal for discussion and refined (and potential abandon if it's not good enough):\r\n- let's consider that a DatasetDict is also a Dataset with the various split concatenated one after the other\r\n- let's disallow the use of integers in split names (probably not a very big breaking change)\r\n- when you index with integers you access the examples progressively in split after the other is finished (in a deterministic order)\r\n- when you index with strings\/split name you have the same behavior as now (full backward compat)\r\n- let's then also have all the methods of a Dataset on the DatasetDict\r\n\r\nThe end goal would be to merge both Dataset and DatasetDict object in a single object that would be (pretty much totally) backward compatible with both.\r\n\r\n\r\nThere are a few things that we could discuss if we want to merge Dataset and DatasetDict:\r\n\r\n1. what happens if you index by a string ? Does it return the column or the split ? We could disallow conflicts between column names and split names to avoid ambiguities. It can be surprising to be able to get a column or a split using the same indexing feature\r\n ```\r\n from datasets import load_dataset\r\n\r\n dataset = load_dataset(...)\r\n dataset[\"train\"]\r\n dataset[\"input_ids\"]\r\n ```\r\n2. what happens when you iterate over the object ? I guess it should iterate over the examples as a Dataset object, but a DatasetDict used to iterate over the splits as they are the dictionary keys. This is a breaking change that we can discuss.\r\n\r\nMoreover regarding your points:\r\n\r\n- integers are not allowed as split names already\r\n- it's definitely doable to have all the methods. 
Maybe some of them like train_test_split that is currently only available for Dataset can be tweaked to work for a split dataset\r\n\r\n\r\ncc: @thomwolf @lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2461\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461","id":915286150,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MTE3MTY4","number":2461,"title":"Support sliced list arrays in cast","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-08T17:38:47Z","updated_at":"2021-06-08T17:56:24Z","closed_at":"2021-06-08T17:56:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2461","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2461.patch"},"body":"There is this issue in pyarrow:\r\n```python\r\nimport pyarrow as pa\r\n\r\narr = pa.array([[i * 10] for i in range(4)])\r\narr.cast(pa.list_(pa.int32())) # works\r\n\r\narr = arr.slice(1)\r\narr.cast(pa.list_(pa.int32())) # fails\r\n# ArrowNotImplementedError(\"Casting sliced lists (non-zero offset) not yet implemented\")\r\n```\r\n\r\nHowever in `Dataset.cast` we slice tables to cast their types (it's memory intensive), so we have the same issue.\r\nBecause of this it is currently not possible to cast a Dataset with a Sequence feature type (unless the table is small enough to not be sliced).\r\n\r\nIn this PR I fixed this by resetting the offset of `pyarrow.ListArray` arrays to zero in the table before casting.\r\nI used `pyarrow.compute.subtract` function to update the offsets of the ListArray.\r\n\r\ncc @abhi1thakur @SBrandeis ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2460\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460","id":915268536,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY1MTAyMjA4","number":2460,"title":"Revert default in-memory for small datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/4","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4\/labels","id":6680642,"node_id":"MDk6TWlsZXN0b25lNjY4MDY0Mg==","number":4,"title":"1.8","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":2,"state":"closed","created_at":"2021-04-20T16:49:16Z","updated_at":"2021-06-08T18:51:37Z","due_on":"2021-06-08T07:00:00Z","closed_at":"2021-06-08T18:51:04Z"},"comments":1,"created_at":"2021-06-08T17:14:23Z","updated_at":"2021-06-08T18:04:14Z","closed_at":"2021-06-08T17:55:43Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2460","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2460.patch"},"body":"Close #2458","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2459\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2459","id":915222015,"node_id":"MDU6SXNzdWU5MTUyMjIwMTU=","number":2459,"title":"`Proto_qa` hosting seems to be 
broken","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-08T16:16:32Z","updated_at":"2021-06-10T08:31:09Z","closed_at":"2021-06-10T08:31:09Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe hosting (on Github) of the `proto_qa` dataset seems broken. I haven't investigated more yet, just flagging it for now. \r\n\r\n@zaidalyafeai if you want to dive into it, I think it's just a matter of changing the links in `proto_qa.py`\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"proto_qa\")\r\n```\r\n\r\n## Actual results\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 751, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 575, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 630, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/hf\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/proto_qa\/445346efaad5c5f200ecda4aa7f0fb50ff1b55edde3003be424a2112c3e8102e\/proto_qa.py\", line 131, in _split_generators\r\n train_fpath = dl_manager.download(_URLs[self.config.name][\"train\"])\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 199, in download\r\n num_proc=download_config.num_proc,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 195, in map_nested\r\n return function(data_struct)\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/download_manager.py\", line 218, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File 
\"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/home\/hf\/dev\/promptsource\/.venv\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/iesl\/protoqa-data\/master\/data\/train\/protoqa_train.jsonl\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2458\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2458","id":915199693,"node_id":"MDU6SXNzdWU5MTUxOTk2OTM=","number":2458,"title":"Revert default in-memory for small datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/4","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/4\/labels","id":6680642,"node_id":"MDk6TWlsZXN0b25lNjY4MDY0Mg==","number":4,"title":"1.8","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":2,"state":"closed","created_at":"2021-04-20T16:49:16Z","updated_at":"2021-06-08T18:51:37Z","due_on":"2021-06-08T07:00:00Z","closed_at":"2021-06-08T18:51:04Z"},"comments":1,"created_at":"2021-06-08T15:51:41Z","updated_at":"2021-06-08T18:57:11Z","closed_at":"2021-06-08T17:55:43Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Users are reporting issues and confusion about setting default in-memory to True for small datasets.\r\n\r\nWe see 2 clear use cases of Datasets:\r\n- the \"canonical\" way, where you can work with very large datasets, as they are memory-mapped and cached (after every transformation)\r\n- some edge cases (speed benchmarks, interactive\/exploratory analysis,...), where default in-memory can explicitly be enabled, and no caching will be done\r\n\r\nAfter discussing with @lhoestq we have agreed to:\r\n- revert this feature (implemented in #2182)\r\n- explain in the docs how to optimize speed\/performance by setting default in-memory\r\n\r\ncc: @stas00 https:\/\/github.com\/huggingface\/datasets\/pull\/2409#issuecomment-856210552","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2457\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457","id":915079441,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY0OTQwMzQ0","number":2457,"title":"Add align_labels_with_mapping 
function","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-08T13:54:00Z","updated_at":"2021-06-17T10:17:06Z","closed_at":"2021-06-17T09:56:52Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2457","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2457.patch"},"body":"This PR adds a helper function to align the `label2id` mapping between a `datasets.Dataset` and a classifier (e.g. a transformer with a `PretrainedConfig.label2id` dict), with the alignment performed on the dataset itself.\r\n\r\nThis will help us with the Hub evaluation, where we won't know in advance whether a model that is fine-tuned on say MNLI has the same mappings as the MNLI dataset we load from `datasets`.\r\n\r\nAn example where this is needed is if we naively try to evaluate `microsoft\/deberta-base-mnli` on `mnli` because the model config has the following mappings:\r\n\r\n```python\r\n \"id2label\": {\r\n \"0\": \"CONTRADICTION\",\r\n \"1\": \"NEUTRAL\",\r\n \"2\": \"ENTAILMENT\"\r\n },\r\n \"label2id\": {\r\n \"CONTRADICTION\": 0,\r\n \"ENTAILMENT\": 2,\r\n \"NEUTRAL\": 1\r\n }\r\n```\r\n\r\nwhile the `mnli` dataset has the `contradiction` and `neutral` labels swapped:\r\n\r\n```python\r\nid2label = {0: 'entailment', 1: 'neutral', 2: 'contradiction'}\r\nlabel2id = {'contradiction': 2, 'entailment': 0, 'neutral': 1}\r\n```\r\n\r\nAs a result, we get a much lower accuracy during evaluation:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom transformers.trainer_utils import EvalPrediction\r\nfrom transformers import AutoModelForSequenceClassification, Trainer\r\n\r\n# load dataset for evaluation\r\nmnli = load_dataset(\"glue\", \"mnli\", split=\"test\")\r\n# load model\r\nmodel_ckpt = \"microsoft\/deberta-base-mnli\"\r\nmodel = AutoModelForSequenceClassification.from_pretrained(checkpoint)\r\n# preprocess, create trainer ...\r\nmnli_enc = ...\r\ntrainer = Trainer(model, args=args, tokenizer=tokenizer)\r\n# generate preds\r\npreds = trainer.predict(mnli_enc)\r\n# preds.label_ids misalinged with model.config => returns wrong accuracy (too low)!\r\ncompute_metrics(EvalPrediction(preds.predictions, preds.label_ids))\r\n```\r\n\r\nThe fix is to use the helper function before running the evaluation to make sure the label IDs are 
aligned:\r\n\r\n```python\r\nmnli_enc_aligned = mnli_enc.align_labels_with_mapping(label2id=config.label2id, label_column=\"label\")\r\n# preds now aligned and everyone is happy :)\r\npreds = trainer.predict(mnli_enc_aligned)\r\n```\r\n\r\ncc @thomwolf @lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2456\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456","id":914709293,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY0NjAwOTk1","number":2456,"title":"Fix cross-reference typos in documentation","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-08T09:45:14Z","updated_at":"2021-06-08T17:41:37Z","closed_at":"2021-06-08T17:41:36Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2456","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2456.patch"},"body":"Fix some minor typos in docs that avoid the creation of cross-reference links.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2455\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455","id":914177468,"node_id":"MDExOlB1bGxSZXF1ZXN0NjY0MTEzNjg2","number":2455,"title":"Update version in 
xor_tydi_qa.py","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-08T02:23:45Z","updated_at":"2021-06-14T15:35:25Z","closed_at":"2021-06-14T15:35:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2455","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2455.patch"},"body":"Fix #2449\r\n\r\n@lhoestq Should I revert to the old `dummy\/1.0.0` or delete it and keep only `dummy\/1.1.0`?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2454\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454","id":913883631,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYzODUyODU1","number":2454,"title":"Rename config and environment variable for in memory max 
size","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-07T19:21:08Z","updated_at":"2021-06-07T20:43:46Z","closed_at":"2021-06-07T20:43:46Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2454","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2454.patch"},"body":"As discussed in #2409, both config and environment variable have been renamed.\r\n\r\ncc: @stas00, huggingface\/transformers#12056","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2453\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453","id":913729258,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYzNzE3NTk2","number":2453,"title":"Keep original features 
order","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":5,"created_at":"2021-06-07T16:26:38Z","updated_at":"2021-06-15T18:05:36Z","closed_at":"2021-06-15T15:43:48Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2453","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2453.patch"},"body":"When loading a Dataset from a JSON file whose column names are not sorted alphabetically, we should get the same column name order, whether we pass features (in the same order as in the file) or not.\r\n\r\nI found this issue while working on #2366.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2452\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2452","id":913603877,"node_id":"MDU6SXNzdWU5MTM2MDM4Nzc=","number":2452,"title":"MRPC test set differences between torch and tensorflow datasets","user":{"login":"FredericOdermatt","id":50372080,"node_id":"MDQ6VXNlcjUwMzcyMDgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50372080?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/FredericOdermatt","html_url":"https:\/\/github.com\/FredericOdermatt","followers_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/followers","following_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/orgs","repos_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/repos","events_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/FredericOdermatt\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-07T14:20:26Z","updated_at":"2021-06-07T14:34:32Z","closed_at":"2021-06-07T14:34:32Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen using `load_dataset(\"glue\", \"mrpc\")` to load the MRPC dataset, the test set includes the labels. When using `tensorflow_datasets.load('glue\/{}'.format('mrpc'))` to load the dataset the test set does not contain the labels. 
There should be consistency between torch and tensorflow ways of importing the GLUE datasets.\r\n\r\n## Steps to reproduce the bug\r\n\r\nMinimal working code \r\n```python\r\nfrom datasets import load_dataset\r\nimport tensorflow as tf\r\nimport tensorflow_datasets\r\n\r\n# torch\r\ndataset = load_dataset(\"glue\", \"mrpc\")\r\n# tf\r\ndata = tensorflow_datasets.load('glue\/{}'.format('mrpc'))\r\ndata = list(data['test'].as_numpy_iterator())\r\nfor i in range(40,50):\r\n tf_sentence1 = data[i]['sentence1'].decode(\"utf-8\") \r\n tf_sentence2 = data[i]['sentence2'].decode(\"utf-8\") \r\n\r\n tf_label = data[i]['label']\r\n \r\n index = data[i]['idx']\r\n print('Index {}'.format(index))\r\n torch_sentence1 = dataset['test']['sentence1'][index]\r\n torch_sentence2 = dataset['test']['sentence2'][index]\r\n\r\n torch_label = dataset['test']['label'][index]\r\n print('Tensorflow: \\n\\tSentence1 {}\\n\\tSentence2 {}\\n\\tLabel {}'.format(tf_sentence1, tf_sentence2, tf_label))\r\n print('Torch: \\n\\tSentence1 {}\\n\\tSentence2 {}\\n\\tLabel {}'.format(torch_sentence1, torch_sentence2, torch_label))\r\n```\r\n\r\nSample output \r\n```\r\nIndex 954\r\nTensorflow: \r\n\tSentence1 Sabri Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate yesterday on charges of violating U.S. arms-control laws .\r\n\tSentence2 The elder Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate Wednesday on charges of violating U.S. arms control laws .\r\n\tLabel -1\r\nTorch: \r\n\tSentence1 Sabri Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate yesterday on charges of violating U.S. arms-control laws .\r\n\tSentence2 The elder Yakou , an Iraqi native who is a legal U.S. resident , appeared before a federal magistrate Wednesday on charges of violating U.S. arms control laws .\r\n\tLabel 1\r\nIndex 711\r\nTensorflow: \r\n\tSentence1 Others keep records sealed for as little as five years or as much as 30 .\r\n\tSentence2 Some states make them available immediately ; others keep them sealed for as much as 30 years .\r\n\tLabel -1\r\nTorch: \r\n\tSentence1 Others keep records sealed for as little as five years or as much as 30 .\r\n\tSentence2 Some states make them available immediately ; others keep them sealed for as much as 30 years .\r\n\tLabel 0\r\n```\r\n\r\n## Expected results\r\nI would expect the datasets to be independent of whether I am working with torch or tensorflow.\r\n\r\n## Actual results\r\nTest set labels are provided in the `datasets.load_datasets()` for MRPC. 
However MRPC is the only task where the test set labels are not -1.\r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.109+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2451\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451","id":913263340,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYzMzIwNDY1","number":2451,"title":"Mention that there are no answers in adversarial_qa test set","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-07T08:13:57Z","updated_at":"2021-06-07T08:34:14Z","closed_at":"2021-06-07T08:34:13Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2451","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2451.patch"},"body":"As mention in issue https:\/\/github.com\/huggingface\/datasets\/issues\/2447, there are no answers in the test set","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2450\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2450","id":912890291,"node_id":"MDU6SXNzdWU5MTI4OTAyOTE=","number":2450,"title":"BLUE file not 
found","user":{"login":"mirfan899","id":3822565,"node_id":"MDQ6VXNlcjM4MjI1NjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3822565?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mirfan899","html_url":"https:\/\/github.com\/mirfan899","followers_url":"https:\/\/api.github.com\/users\/mirfan899\/followers","following_url":"https:\/\/api.github.com\/users\/mirfan899\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mirfan899\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mirfan899\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mirfan899\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mirfan899\/orgs","repos_url":"https:\/\/api.github.com\/users\/mirfan899\/repos","events_url":"https:\/\/api.github.com\/users\/mirfan899\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mirfan899\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-06T17:01:54Z","updated_at":"2021-06-07T10:46:15Z","closed_at":"2021-06-07T10:46:15Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, I'm having the following issue when I try to load the `blue` metric.\r\n\r\n```shell\r\nimport datasets\r\nmetric = datasets.load_metric('blue')\r\nTraceback (most recent call last):\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 320, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.7.0\/metrics\/blue\/blue.py\r\nDuring handling of the above exception, another exception occurred:\r\nTraceback (most recent call last):\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 332, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/metrics\/blue\/blue.py\r\nDuring handling of the above exception, another exception occurred:\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 605, in load_metric\r\n dataset=False,\r\n File 
\"\/home\/irfan\/environments\/Perplexity_Transformers\/lib\/python3.6\/site-packages\/datasets\/load.py\", line 343, in prepare_module\r\n combined_path, github_file_path\r\nFileNotFoundError: Couldn't find file locally at blue\/blue.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.7.0\/metrics\/blue\/blue.py.\r\nThe file is also not present on the master branch on github.\r\n```\r\nHere is dataset installed version info\r\n```shell\r\npip freeze | grep datasets\r\ndatasets==1.7.0\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2449\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449","id":912751752,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYyODg1ODUz","number":2449,"title":"Update `xor_tydi_qa` url to v1.1","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-06-06T09:44:58Z","updated_at":"2021-06-07T15:16:21Z","closed_at":"2021-06-07T08:31:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2449","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2449.patch"},"body":"The dataset is updated and the old url no longer works. So I updated it.\r\n\r\nI faced a bug while trying to fix this. Documenting the solution here. 
Maybe we can add it to the doc (`CONTRIBUTING.md` and `ADD_NEW_DATASET.md`).\r\n> And to make the command work without the ExpectedMoreDownloadedFiles error, you just need to use the --ignore_verifications flag.\r\nhttps:\/\/github.com\/huggingface\/datasets\/issues\/2076#issuecomment-803904366","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2448\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448","id":912360109,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYyNTI2NjA3","number":2448,"title":"Fix flores download link","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-05T17:30:24Z","updated_at":"2021-06-08T20:02:58Z","closed_at":"2021-06-07T08:18:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2448","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2448.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2447\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2447","id":912299527,"node_id":"MDU6SXNzdWU5MTIyOTk1Mjc=","number":2447,"title":"dataset adversarial_qa has no answers in the \"test\" 
set","user":{"login":"bjascob","id":22728060,"node_id":"MDQ6VXNlcjIyNzI4MDYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22728060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bjascob","html_url":"https:\/\/github.com\/bjascob","followers_url":"https:\/\/api.github.com\/users\/bjascob\/followers","following_url":"https:\/\/api.github.com\/users\/bjascob\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bjascob\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bjascob\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bjascob\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bjascob\/orgs","repos_url":"https:\/\/api.github.com\/users\/bjascob\/repos","events_url":"https:\/\/api.github.com\/users\/bjascob\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bjascob\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-05T14:57:38Z","updated_at":"2021-06-07T11:13:07Z","closed_at":"2021-06-07T11:13:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen loading the adversarial_qa dataset the 'test' portion has no answers. Only the 'train' and 'validation' portions do. This occurs with all four of the configs ('adversarialQA', 'dbidaf', 'dbert', 'droberta')\r\n\r\n## Steps to reproduce the bug\r\n```\r\nfrom datasets import load_dataset\r\nexamples = load_dataset('adversarial_qa', 'adversarialQA', script_version=\"master\")['test']\r\nprint('Loaded {:,} examples'.format(len(examples)))\r\nhas_answers = 0\r\nfor e in examples:\r\n if e['answers']['text']:\r\n has_answers += 1\r\nprint('{:,} have answers'.format(has_answers))\r\n>>> Loaded 3,000 examples\r\n>>> 0 have answers\r\n\r\nexamples = load_dataset('adversarial_qa', 'adversarialQA', script_version=\"master\")['validation']\r\n<...code above...>\r\n>>> Loaded 3,000 examples\r\n>>> 3,000 have answers\r\n```\r\n\r\n## Expected results\r\nIf 'test' is a valid dataset, it should have answers. 
Also note that all of the 'train' and 'validation' sets have answers, there are no \"no answer\" questions with this set (not sure if this is correct or not).\r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.8.0-53-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.5\r\n- PyArrow version: 1.0.0\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2446\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2446","id":911635399,"node_id":"MDU6SXNzdWU5MTE2MzUzOTk=","number":2446,"title":"`yelp_polarity` is broken","user":{"login":"JetRunner","id":22514219,"node_id":"MDQ6VXNlcjIyNTE0MjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22514219?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JetRunner","html_url":"https:\/\/github.com\/JetRunner","followers_url":"https:\/\/api.github.com\/users\/JetRunner\/followers","following_url":"https:\/\/api.github.com\/users\/JetRunner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JetRunner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JetRunner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JetRunner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JetRunner\/orgs","repos_url":"https:\/\/api.github.com\/users\/JetRunner\/repos","events_url":"https:\/\/api.github.com\/users\/JetRunner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JetRunner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-04T15:44:29Z","updated_at":"2021-06-04T18:56:47Z","closed_at":"2021-06-04T18:56:47Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/22514219\/120828150-c4a35b00-c58e-11eb-8083-a537cee4dbb3.png)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2445\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445","id":911577578,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYxODMzMTky","number":2445,"title":"Fix broken URLs for bn_hate_speech and 
covid_tweets_japanese","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-04T14:53:35Z","updated_at":"2021-06-04T17:39:46Z","closed_at":"2021-06-04T17:39:45Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2445","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2445.patch"},"body":"Closes #2388 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2444\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2444","id":911297139,"node_id":"MDU6SXNzdWU5MTEyOTcxMzk=","number":2444,"title":"Sentence Boundaries missing in Dataset: xtreme \/ udpos","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-04T09:10:26Z","updated_at":"2021-06-18T11:53:43Z","closed_at":"2021-06-18T11:53:43Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I was browsing through annotation guidelines, as suggested by the datasets introduction.\r\n\r\nThe guidlines saids \"There must be exactly one blank line after every sentence, including the last sentence in the file. Empty sentences are not allowed.\" in the [Sentence Boundaries and Comments section](https:\/\/universaldependencies.org\/format.html#sentence-boundaries-and-comments)\r\n\r\nBut the sentence boundaries seems not to be represented by huggingface datasets features well. I found out that multiple sentence are concatenated together as a 1D array, without any delimiter.\r\n\r\nPAN-x, which is another token classification subset from xtreme do represent the sentence boundary using a 2D array.\r\n\r\nYou may compare in PAN-x.en and udpos.English in the explorer:\r\n https:\/\/huggingface.co\/datasets\/viewer\/?dataset=xtreme","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2443\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2443","id":909983574,"node_id":"MDU6SXNzdWU5MDk5ODM1NzQ=","number":2443,"title":"Some tests hang on Windows","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-06-03T00:27:30Z","updated_at":"2021-06-28T08:47:39Z","closed_at":"2021-06-28T08:47:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Currently, several tests hang on Windows if the max path limit of 260 characters is not disabled. This happens due to the changes introduced by #2223 that cause an infinite loop in `WindowsFileLock` described in #2220. 
This can be very tricky to debug, so I think now is a good time to address these issues\/PRs. IMO throwing an error is too harsh, but maybe we can emit a warning in the top-level `__init__.py ` on startup if long paths are not enabled.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2442\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442","id":909677029,"node_id":"MDExOlB1bGxSZXF1ZXN0NjYwMjE1ODY1","number":2442,"title":"add english language tags for ~100 datasets","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-06-02T16:24:56Z","updated_at":"2021-06-04T09:51:40Z","closed_at":"2021-06-04T09:51:39Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2442","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2442.patch"},"body":"As discussed on Slack, I have manually checked for ~100 datasets that they have at least one subset in English. 
This information was missing so adding into the READMEs.\r\n\r\nNote that I didn't check all the subsets so it's possible that some of the datasets have subsets in other languages than English...","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2441\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2441","id":908554713,"node_id":"MDU6SXNzdWU5MDg1NTQ3MTM=","number":2441,"title":"DuplicatedKeysError on personal dataset","user":{"login":"lucaguarro","id":22605313,"node_id":"MDQ6VXNlcjIyNjA1MzEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22605313?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucaguarro","html_url":"https:\/\/github.com\/lucaguarro","followers_url":"https:\/\/api.github.com\/users\/lucaguarro\/followers","following_url":"https:\/\/api.github.com\/users\/lucaguarro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucaguarro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucaguarro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucaguarro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucaguarro\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucaguarro\/repos","events_url":"https:\/\/api.github.com\/users\/lucaguarro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucaguarro\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-06-01T17:59:41Z","updated_at":"2021-06-04T23:50:03Z","closed_at":"2021-06-04T23:50:03Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nEver since today, I have been getting a DuplicatedKeysError while trying to load my dataset from my own script.\r\nError returned when running this line: `dataset = load_dataset('\/content\/drive\/MyDrive\/Thesis\/Datasets\/book_preprocessing\/goodreads_maharjan_trimmed_and_nered\/goodreadsnered.py')`\r\nNote that my script was working fine with earlier versions of the Datasets library. Cannot say with 100% certainty if I have been doing something wrong with my dataset script this whole time or if this is simply a bug with the new version of datasets.\r\n\r\n## Steps to reproduce the bug\r\nI cannot provide code to reproduce the error as I am working with my own dataset. 
I can however provide my script if requested.\r\n\r\n## Expected results\r\nFor my data to be loaded.\r\n\r\n## Actual results\r\n**DuplicatedKeysError** exception is raised\r\n```\r\nDownloading and preparing dataset good_reads_practice_dataset\/main_domain (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/root\/.cache\/huggingface\/datasets\/good_reads_practice_dataset\/main_domain\/1.1.0\/64ff7c3fee2693afdddea75002eb6887d4fedc3d812ae3622128c8504ab21655...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nDuplicatedKeysError Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 dataset = load_dataset('\/content\/drive\/MyDrive\/Thesis\/Datasets\/book_preprocessing\/goodreads_maharjan_trimmed_and_nered\/goodreadsnered.py')\r\n\r\n5 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, task, **config_kwargs)\r\n 749 try_from_hf_gcs=try_from_hf_gcs,\r\n 750 base_path=base_path,\r\n--> 751 use_auth_token=use_auth_token,\r\n 752 )\r\n 753 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 573 if not downloaded_from_gcs:\r\n 574 self._download_and_prepare(\r\n--> 575 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 576 )\r\n 577 # Sync info\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 650 try:\r\n 651 # Prepare split will record examples associated to the split\r\n--> 652 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 653 except OSError as e:\r\n 654 raise OSError(\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _prepare_split(self, split_generator)\r\n 990 writer.write(example, key)\r\n 991 finally:\r\n--> 992 num_examples, num_bytes = writer.finalize()\r\n 993 \r\n 994 split_generator.split_info.num_examples = num_examples\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in finalize(self, close_stream)\r\n 407 # In case current_examples < writer_batch_size, but user uses finalize()\r\n 408 if self._check_duplicates:\r\n--> 409 self.check_duplicate_keys()\r\n 410 # Re-intializing to empty list for next batch\r\n 411 self.hkey_record = []\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in check_duplicate_keys(self)\r\n 347 for hash, key in self.hkey_record:\r\n 348 if hash in tmp_record:\r\n--> 349 raise DuplicatedKeysError(key)\r\n 350 else:\r\n 351 tmp_record.add(hash)\r\n\r\nDuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 0\r\nKeys should be unique and deterministic in nature\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.7.0\r\n- Platform: Windows-10-10.0.19041-SP0\r\n- Python version: 3.7.9\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2440\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2440","id":908521954,"node_id":"MDU6SXNzdWU5MDg1MjE5NTQ=","number":2440,"title":"Remove `extended` field from dataset tagger","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-06-01T17:18:42Z","updated_at":"2021-06-09T09:06:31Z","closed_at":"2021-06-09T09:06:30Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhile working on #2435 I used the [dataset tagger](https:\/\/huggingface.co\/datasets\/tagging\/) to generate the missing tags for the YAML metadata of each README.md file. 
However, it seems that our CI raises an error when the `extended` field is included:\r\n\r\n```\r\ndataset_name = 'arcd'\r\n\r\n @pytest.mark.parametrize(\"dataset_name\", get_changed_datasets(repo_path))\r\n def test_changed_dataset_card(dataset_name):\r\n card_path = repo_path \/ \"datasets\" \/ dataset_name \/ \"README.md\"\r\n assert card_path.exists()\r\n error_messages = []\r\n try:\r\n ReadMe.from_readme(card_path)\r\n except Exception as readme_error:\r\n error_messages.append(f\"The following issues have been found in the dataset cards:\\nREADME:\\n{readme_error}\")\r\n try:\r\n DatasetMetadata.from_readme(card_path)\r\n except Exception as metadata_error:\r\n error_messages.append(\r\n f\"The following issues have been found in the dataset cards:\\nYAML tags:\\n{metadata_error}\"\r\n )\r\n \r\n if error_messages:\r\n> raise ValueError(\"\\n\".join(error_messages))\r\nE ValueError: The following issues have been found in the dataset cards:\r\nE YAML tags:\r\nE __init__() got an unexpected keyword argument 'extended'\r\n\r\ntests\/test_dataset_cards.py:70: ValueError\r\n```\r\n\r\nConsider either removing this tag from the tagger or including it as part of the validation step in the CI.\r\n\r\ncc @yjernite ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2439\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439","id":908511983,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU5MTkzMDE3","number":2439,"title":"Better error message when trying to access elements of a DatasetDict without specifying the split","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-01T17:04:32Z","updated_at":"2021-06-15T16:03:23Z","closed_at":"2021-06-07T08:54:35Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2439","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2439.patch"},"body":"As mentioned in #2437 it'd be nice to to 
have an indication to the users when they try to access an element of a DatasetDict without specifying the split name.\r\n\r\ncc @thomwolf ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2438\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438","id":908461914,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU5MTQ5Njg0","number":2438,"title":"Fix NQ features loading: reorder fields of features to match nested fields order in arrow data","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-01T16:09:30Z","updated_at":"2021-06-04T09:02:31Z","closed_at":"2021-06-04T09:02:31Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2438","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2438.patch"},"body":"As mentioned in #2401, there is an issue when loading the features of `natural_questions` since the order of the nested fields in the features don't match. The order is important since it matters for the underlying arrow schema.\r\n\r\nTo fix that I re-order the features based on the arrow schema:\r\n\r\n```python\r\ninferred_features = Features.from_arrow_schema(arrow_table.schema)\r\nself.info.features = self.info.features.reorder_fields_as(inferred_features)\r\nassert self.info.features.type == inferred_features.type\r\n```\r\n\r\nThe re-ordering is a recursive function. 
It takes into account that the `Sequence` feature type is a struct of list and not a list of struct.\r\n\r\nNow it's possible to load `natural_questions` again :)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2437\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437","id":908108882,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4ODUwNTkw","number":2437,"title":"Better error message when using the wrong load_from_disk","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-06-01T09:43:22Z","updated_at":"2021-06-08T18:03:50Z","closed_at":"2021-06-08T18:03:50Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2437","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2437.patch"},"body":"As mentioned in #2424, the error message when one tries to use `Dataset.load_from_disk` to load a DatasetDict object (or _vice versa_) can be improved. 
I added a suggestion in the error message to let users know that they should use the other one.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2436\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436","id":908100211,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4ODQzMzQy","number":2436,"title":"Update DatasetMetadata and ReadMe","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-06-01T09:32:37Z","updated_at":"2021-06-14T13:23:27Z","closed_at":"2021-06-14T13:23:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2436","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2436.patch"},"body":"This PR contains the changes discussed in #2395.\r\n\r\n**Edit**:\r\nIn addition to those changes, I'll be updating the `ReadMe` as follows:\r\n\r\nCurrently, `Section` has separate parsing and validation error lists. In `.validate()`, we add these lists to the final lists and throw errors.\r\n\r\nOne way to make `ReadMe` consistent with `DatasetMetadata` and add a separate `.validate()` method is to throw separate parsing and validation errors. \r\n\r\nThis way, we don't have to throw validation errors, but only parsing errors in `__init__ ()`. We can have an option in `__init__()` to suppress parsing errors so that an object is created for validation. Doing this will allow the user to get all the errors in one go.\r\n\r\nIn `test_dataset_cards` , we are already catching error messages and appending to a list. This can be done for `ReadMe()` for parsing errors, and `ReadMe(...,suppress_errors=True); readme.validate()` for validation, separately.\r\n\r\n**Edit 2**:\r\nThe only parsing issue we have as of now is multiple headings at the same level with the same name. I assume this will happen very rarely, but it is still better to throw an error than silently pick one of them. 
It should be okay to separate it this way. \r\n\r\nWdyt @lhoestq ?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2435\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435","id":907505531,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MzQzNDE2","number":2435,"title":"Insert Extractive QA templates for SQuAD-like datasets","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-31T14:09:11Z","updated_at":"2021-06-03T14:34:30Z","closed_at":"2021-06-03T14:32:27Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2435","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2435.patch"},"body":"This PR adds task templates for 9 SQuAD-like templates with the following properties:\r\n\r\n* 1 config\r\n* A schema that matches the `squad` one (i.e. same column names, especially for the nested `answers` column because the current implementation does not support casting with mismatched columns. see #2434)\r\n* Less than 20GB (my laptop can't handle more right now)\r\n\r\nThe aim of this PR is to provide a few datasets to experiment with the task template integration in other libraries \/ services. 
\r\n\r\nPR #2429 should be merged before this one.\r\n\r\ncc @abhi1thakur ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2434\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2434","id":907503557,"node_id":"MDU6SXNzdWU5MDc1MDM1NTc=","number":2434,"title":"Extend QuestionAnsweringExtractive template to handle nested columns","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-31T14:06:51Z","updated_at":"2021-06-17T08:21:30Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Currently the `QuestionAnsweringExtractive` task template and `preprare_for_task` only support \"flat\" features. 
We should extend the functionality to cover QA datasets like:\r\n\r\n* `iapp_wiki_qa_squad`\r\n* `parsinlu_reading_comprehension`\r\n\r\nwhere the nested features differ with those from `squad` and trigger an `ArrowNotImplementedError`:\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nArrowNotImplementedError Traceback (most recent call last)\r\n in \r\n----> 1 ds.prepare_for_task(\"question-answering-extractive\")[0]\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in prepare_for_task(self, task)\r\n 1436 # We found a template so now flush `DatasetInfo` to skip the template update in `DatasetInfo.__post_init__`\r\n 1437 dataset.info.task_templates = None\r\n-> 1438 dataset = dataset.cast(features=template.features)\r\n 1439 return dataset\r\n 1440 \r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in cast(self, features, batch_size, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, num_proc)\r\n 977 format = self.format\r\n 978 dataset = self.with_format(\"arrow\")\r\n--> 979 dataset = dataset.map(\r\n 980 lambda t: t.cast(schema),\r\n 981 batched=True,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint, desc)\r\n 1600 \r\n 1601 if num_proc is None or num_proc == 1:\r\n-> 1602 return self._map_single(\r\n 1603 function=function,\r\n 1604 with_indices=with_indices,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 176 }\r\n 177 # apply actual function\r\n--> 178 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 179 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 180 # re-apply format to the output\r\n\r\n~\/git\/datasets\/src\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 395 # Call actual function\r\n 396 \r\n--> 397 out = func(self, *args, **kwargs)\r\n 398 \r\n 399 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset, desc)\r\n 1940 ) # Something simpler?\r\n 1941 try:\r\n-> 1942 batch = apply_function_on_filtered_inputs(\r\n 1943 batch,\r\n 1944 indices,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in apply_function_on_filtered_inputs(inputs, indices, check_same_num_examples, offset)\r\n 1836 effective_indices = [i + offset for i in indices] if isinstance(indices, list) else indices + offset\r\n 1837 processed_inputs = (\r\n-> 1838 function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n 1839 )\r\n 1840 if update_data is None:\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in (t)\r\n 978 dataset = self.with_format(\"arrow\")\r\n 979 dataset = dataset.map(\r\n--> 980 lambda t: t.cast(schema),\r\n 981 batched=True,\r\n 982 batch_size=batch_size,\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in 
pyarrow.lib.Table.cast()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/table.pxi in pyarrow.lib.ChunkedArray.cast()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/compute.py in cast(arr, target_type, safe)\r\n 241 else:\r\n 242 options = CastOptions.unsafe(target_type)\r\n--> 243 return call_function(\"cast\", [arr], options)\r\n 244 \r\n 245 \r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/_compute.pyx in pyarrow._compute.call_function()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/_compute.pyx in pyarrow._compute.Function.call()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.pyarrow_internal_check_status()\r\n\r\n~\/miniconda3\/envs\/datasets\/lib\/python3.8\/site-packages\/pyarrow\/error.pxi in pyarrow.lib.check_status()\r\n\r\nArrowNotImplementedError: Unsupported cast from struct, answer_start: list, text: list> to struct using function cast_struct\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2433\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433","id":907488711,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MzI5MDQ4","number":2433,"title":"Fix DuplicatedKeysError in adversarial_qa","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-31T13:48:47Z","updated_at":"2021-06-01T08:52:11Z","closed_at":"2021-06-01T08:52:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2433","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2433.patch"},"body":"Fixes #2431","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2432\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432","id":907462881,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MzA3MTE1","number":2432,"title":"Fix CI six installation on linux","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-31T13:15:36Z","updated_at":"2021-05-31T13:17:07Z","closed_at":"2021-05-31T13:17:06Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2432","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2432.patch"},"body":"For some reason we end up with this error in the linux CI when running pip install .[tests]\r\n```\r\npip._vendor.resolvelib.resolvers.InconsistentCandidate: Provided candidate AlreadyInstalledCandidate(six 1.16.0 (\/usr\/local\/lib\/python3.6\/site-packages)) does not satisfy SpecifierRequirement('six>1.9'), SpecifierRequirement('six>1.9'), SpecifierRequirement('six>=1.11'), SpecifierRequirement('six~=1.15'), SpecifierRequirement('six'), SpecifierRequirement('six>=1.5.2'), SpecifierRequirement('six>=1.9.0'), SpecifierRequirement('six>=1.11.0'), SpecifierRequirement('six'), SpecifierRequirement('six>=1.6.1'), SpecifierRequirement('six>=1.9'), SpecifierRequirement('six>=1.5'), SpecifierRequirement('six<2.0'), SpecifierRequirement('six<2.0'), SpecifierRequirement('six'), SpecifierRequirement('six'), SpecifierRequirement('six~=1.15.0'), SpecifierRequirement('six'), SpecifierRequirement('six<2.0,>=1.6.1'), SpecifierRequirement('six'), SpecifierRequirement('six>=1.5.2'), SpecifierRequirement('six>=1.9.0')\r\n```\r\nexample CI failure here:\r\nhttps:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/6200\/workflows\/b64fdec9-f9e6-431c-acd7-e9f2c440c568\/jobs\/38247\r\n\r\nThe main version requirement comes from tensorflow: `six~=1.15.0`\r\nSo I pinned the six version to this.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2431\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2431","id":907413691,"node_id":"MDU6SXNzdWU5MDc0MTM2OTE=","number":2431,"title":"DuplicatedKeysError when trying to load adversarial_qa","user":{"login":"hanss0n","id":21348833,"node_id":"MDQ6VXNlcjIxMzQ4ODMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21348833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hanss0n","html_url":"https:\/\/github.com\/hanss0n","followers_url":"https:\/\/api.github.com\/users\/hanss0n\/followers","following_url":"https:\/\/api.github.com\/users\/hanss0n\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hanss0n\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hanss0n\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hanss0n\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hanss0n\/orgs","repos_url":"https:\/\/api.github.com\/users\/hanss0n\/repos","events_url":"https:\/\/api.github.com\/users\/hanss0n\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hanss0n\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-31T12:11:19Z","updated_at":"2021-06-01T08:54:03Z","closed_at":"2021-06-01T08:52:11Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nA clear and concise description of what the bug is.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndataset = load_dataset('adversarial_qa', 'adversarialQA')\r\n```\r\n\r\n## Expected results\r\nThe dataset should be loaded into memory\r\n\r\n## Actual results\r\n\r\n>DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\n>Found duplicate Key: 4d3cb5677211ee32895ca9c66dad04d7152254d4\r\n>Keys should be unique and deterministic in nature\r\n>\r\n>\r\n>During handling of the above exception, another exception occurred:\r\n>\r\n>DuplicatedKeysError Traceback (most recent call last)\r\n>\r\n>\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in check_duplicate_keys(self)\r\n> 347 for hash, key in self.hkey_record:\r\n> 348 if hash in tmp_record:\r\n>--> 349 raise DuplicatedKeysError(key)\r\n> 350 else:\r\n> 351 tmp_record.add(hash)\r\n>\r\n>DuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\n>Found duplicate Key: 4d3cb5677211ee32895ca9c66dad04d7152254d4\r\n>Keys should be unique and deterministic in nature\r\n\r\n## Environment info\r\n- `datasets` version: 1.7.0\r\n- Platform: Linux-5.4.109+-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2430\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430","id":907322595,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MTg3Njkw","number":2430,"title":"Add version-specific BibTeX","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-31T10:05:42Z","updated_at":"2021-06-08T07:53:22Z","closed_at":"2021-06-08T07:53:22Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2430","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2430.patch"},"body":"As pointed out by @lhoestq in #2411, after the creation of the Zenodo DOI for Datasets, a new BibTeX entry is created with each release.\r\n\r\nThis PR adds a version-specific BibTeX entry, besides the existing one which is generic for the project.\r\n\r\nSee version-specific BibTeX entry here: https:\/\/zenodo.org\/record\/4817769\/export\/hx#.YLSyd6j7RPY","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2429\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429","id":907321665,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MTg2ODc0","number":2429,"title":"Rename QuestionAnswering template to 
QuestionAnsweringExtractive","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-31T10:04:42Z","updated_at":"2021-05-31T15:57:26Z","closed_at":"2021-05-31T15:57:24Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2429","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2429.patch"},"body":"Following the discussion with @thomwolf in #2255, this PR renames the QA template to distinguish extractive vs abstractive QA. The abstractive template will be added in a future PR.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2428\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428","id":907169746,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MDU2MjI3","number":2428,"title":"Add copyright info for wiki_lingua 
dataset","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-31T07:22:52Z","updated_at":"2021-06-04T10:22:33Z","closed_at":"2021-06-04T10:22:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2428","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2428.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2427\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427","id":907162923,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU4MDUwMjAx","number":2427,"title":"Add copyright info to MLSUM 
dataset","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-31T07:15:57Z","updated_at":"2021-06-04T09:53:50Z","closed_at":"2021-06-04T09:53:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2427","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2427.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2426\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2426","id":906473546,"node_id":"MDU6SXNzdWU5MDY0NzM1NDY=","number":2426,"title":"Saving Graph\/Structured Data in Datasets","user":{"login":"gsh199449","id":3295342,"node_id":"MDQ6VXNlcjMyOTUzNDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3295342?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gsh199449","html_url":"https:\/\/github.com\/gsh199449","followers_url":"https:\/\/api.github.com\/users\/gsh199449\/followers","following_url":"https:\/\/api.github.com\/users\/gsh199449\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gsh199449\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gsh199449\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gsh199449\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gsh199449\/orgs","repos_url":"https:\/\/api.github.com\/users\/gsh199449\/repos","events_url":"https:\/\/api.github.com\/users\/gsh199449\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gsh199449\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-29T13:35:21Z","updated_at":"2021-06-02T01:21:03Z","closed_at":"2021-06-02T01:21:03Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Thanks for this amazing library! And my question is I have structured data that is organized with a graph. For example, a dataset with users' friendship relations and user's articles. When I try to save a python dict in the dataset, an error occurred ``did not recognize Python value type when inferring an Arrow data type''.\r\nAlthough I also know that storing a python dict in pyarrow datasets is not the best practice, but I have no idea about how to save structured data in the Datasets. \r\n\r\nThank you very much for your help.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2425\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425","id":906385457,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU3NDAwMjM3","number":2425,"title":"Fix Docstring Mistake: dataset vs. metric","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-29T06:09:53Z","updated_at":"2021-06-01T08:18:04Z","closed_at":"2021-06-01T08:18:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2425","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2425.patch"},"body":"PR to fix #2412","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2424\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2424","id":906193679,"node_id":"MDU6SXNzdWU5MDYxOTM2Nzk=","number":2424,"title":"load_from_disk and save_to_disk are not compatible with each other","user":{"login":"roholazandie","id":7584674,"node_id":"MDQ6VXNlcjc1ODQ2NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7584674?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/roholazandie","html_url":"https:\/\/github.com\/roholazandie","followers_url":"https:\/\/api.github.com\/users\/roholazandie\/followers","following_url":"https:\/\/api.github.com\/users\/roholazandie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/roholazandie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/roholazandie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/roholazandie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/roholazandie\/orgs","repos_url":"https:\/\/api.github.com\/users\/roholazandie\/repos","events_url":"https:\/\/api.github.com\/users\/roholazandie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/roholazandie\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-28T23:07:10Z","updated_at":"2021-06-08T19:22:32Z","closed_at":"2021-06-08T19:22:32Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nload_from_disk and save_to_disk are not compatible. When I use save_to_disk to save a dataset to disk it works perfectly but given the same directory load_from_disk throws an error that it can't find state.json. 
looks like the load_from_disk only works on one split\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"art\")\r\ndataset.save_to_disk(\"mydir\")\r\nd = Dataset.load_from_disk(\"mydir\")\r\n```\r\n\r\n## Expected results\r\nIt is expected that these two functions be the reverse of each other without more manipulation\r\n\r\n## Actual results\r\nFileNotFoundError: [Errno 2] No such file or directory: 'mydir\/art\/state.json'\r\n\r\n## Environment info\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.4.0-73-generic-x86_64-with-Ubuntu-18.04-bionic\r\n- Python version: 3.7.10\r\n- PyTorch version (GPU?): 1.8.1+cu102 (True)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: \r\n- Using distributed or parallel set-up in script?: \r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2423\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423","id":905935753,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU2OTc5MjA5","number":2423,"title":"add `desc` in `map` for `DatasetDict` object","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-28T19:28:44Z","updated_at":"2021-05-31T14:51:23Z","closed_at":"2021-05-31T13:08:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2423","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2423.patch"},"body":"`desc` in `map` currently only works with `Dataset` objects. 
This PR adds support for `DatasetDict` objects as well","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2422\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422","id":905568548,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU2NjM3MzY1","number":2422,"title":"Fix save_to_disk nested features order in dataset_info.json","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-28T15:03:28Z","updated_at":"2021-05-28T15:26:57Z","closed_at":"2021-05-28T15:26:56Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2422","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2422.patch"},"body":"Fix issue https:\/\/github.com\/huggingface\/datasets\/issues\/2267\r\n\r\nThe order of the nested features matters (pyarrow limitation), but the save_to_disk method was saving the features types as JSON with `sort_keys=True`, which was breaking the order of the nested features.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2421\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421","id":905549756,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU2NjIwMTM3","number":2421,"title":"doc: fix typo 
HF_MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-28T14:52:10Z","updated_at":"2021-06-04T09:52:45Z","closed_at":"2021-06-04T09:52:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2421","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2421.patch"},"body":"MAX_MEMORY_DATASET_SIZE_IN_BYTES should be HF_MAX_MEMORY_DATASET_SIZE_IN_BYTES","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2420\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420","id":904821772,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1OTQ1ODgw","number":2420,"title":"Updated Dataset 
Description","user":{"login":"binny-mathew","id":10741860,"node_id":"MDQ6VXNlcjEwNzQxODYw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10741860?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/binny-mathew","html_url":"https:\/\/github.com\/binny-mathew","followers_url":"https:\/\/api.github.com\/users\/binny-mathew\/followers","following_url":"https:\/\/api.github.com\/users\/binny-mathew\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/binny-mathew\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/binny-mathew\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/binny-mathew\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/binny-mathew\/orgs","repos_url":"https:\/\/api.github.com\/users\/binny-mathew\/repos","events_url":"https:\/\/api.github.com\/users\/binny-mathew\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/binny-mathew\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-28T07:10:51Z","updated_at":"2021-06-10T12:11:35Z","closed_at":"2021-06-10T12:11:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2420","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2420.patch"},"body":"Added Point of contact information and several other details about the dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2419\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419","id":904347339,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1NTA1OTM1","number":2419,"title":"adds license information for 
DailyDialog.","user":{"login":"aditya2211","id":11574558,"node_id":"MDQ6VXNlcjExNTc0NTU4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11574558?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/aditya2211","html_url":"https:\/\/github.com\/aditya2211","followers_url":"https:\/\/api.github.com\/users\/aditya2211\/followers","following_url":"https:\/\/api.github.com\/users\/aditya2211\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/aditya2211\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/aditya2211\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/aditya2211\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/aditya2211\/orgs","repos_url":"https:\/\/api.github.com\/users\/aditya2211\/repos","events_url":"https:\/\/api.github.com\/users\/aditya2211\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/aditya2211\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-27T23:03:42Z","updated_at":"2021-05-31T13:16:52Z","closed_at":"2021-05-31T13:16:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2419","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2419.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2418\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418","id":904051497,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MjM2OTEz","number":2418,"title":"add utf-8 while reading 
README","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-27T18:12:28Z","updated_at":"2021-06-04T09:55:01Z","closed_at":"2021-06-04T09:55:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2418","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2418.patch"},"body":"It was causing tests to fail in Windows (see #2416). In Windows, the default encoding is CP1252 which is unable to decode the character byte 0x9d ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2417\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417","id":903956071,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MTU3NTI4","number":2417,"title":"Make datasets PEP-561 
compliant","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-27T16:16:17Z","updated_at":"2021-05-28T13:10:10Z","closed_at":"2021-05-28T13:09:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2417","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2417.patch"},"body":"Allows to type-check datasets with `mypy` when imported as a third-party library\r\n\r\nPEP-561: https:\/\/www.python.org\/dev\/peps\/pep-0561\r\nMyPy doc on the subject: https:\/\/mypy.readthedocs.io\/en\/stable\/installed_packages.html\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2416\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416","id":903932299,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MTM3NDUy","number":2416,"title":"Add KLUE 
dataset","user":{"login":"jungwhank","id":53588015,"node_id":"MDQ6VXNlcjUzNTg4MDE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53588015?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jungwhank","html_url":"https:\/\/github.com\/jungwhank","followers_url":"https:\/\/api.github.com\/users\/jungwhank\/followers","following_url":"https:\/\/api.github.com\/users\/jungwhank\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jungwhank\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jungwhank\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jungwhank\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jungwhank\/orgs","repos_url":"https:\/\/api.github.com\/users\/jungwhank\/repos","events_url":"https:\/\/api.github.com\/users\/jungwhank\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jungwhank\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-05-27T15:49:51Z","updated_at":"2021-06-09T15:00:02Z","closed_at":"2021-06-04T17:45:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2416","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2416.patch"},"body":"Add `KLUE (Korean Language Understanding Evaluation)` dataset released recently from [paper](https:\/\/arxiv.org\/abs\/2105.09680), [github](https:\/\/github.com\/KLUE-benchmark\/KLUE) and [webpage](https:\/\/klue-benchmark.com\/tasks).\r\nPlease let me know if there's anything missing in the code or README.\r\nThanks!\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2415\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2415","id":903923097,"node_id":"MDU6SXNzdWU5MDM5MjMwOTc=","number":2415,"title":"Cached dataset not 
loaded","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-27T15:40:06Z","updated_at":"2021-06-02T13:15:47Z","closed_at":"2021-06-02T13:15:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nI have a large dataset (common_voice, english) where I use several map and filter functions.\r\nSometimes my cached datasets after specific functions are not loaded.\r\nI always use the same arguments, same functions, no seed\u2026\r\n\r\n## Steps to reproduce the bug\r\n```python\r\ndef filter_by_duration(batch):\r\n return (\r\n batch[\"duration\"] <= 10\r\n and batch[\"duration\"] >= 1\r\n and len(batch[\"target_text\"]) > 5\r\n )\r\n\r\ndef prepare_dataset(batch):\r\n batch[\"input_values\"] = processor(\r\n batch[\"speech\"], sampling_rate=batch[\"sampling_rate\"][0]\r\n ).input_values\r\n with processor.as_target_processor():\r\n batch[\"labels\"] = processor(batch[\"target_text\"]).input_ids\r\n return batch\r\n\r\ntrain_dataset = train_dataset.filter(\r\n filter_by_duration,\r\n remove_columns=[\"duration\"],\r\n num_proc=data_args.preprocessing_num_workers,\r\n)\r\n\r\n# PROBLEM HERE -> below function is reexecuted and cache is not loaded\r\ntrain_dataset = train_dataset.map(\r\n prepare_dataset,\r\n remove_columns=train_dataset.column_names,\r\n batch_size=training_args.per_device_train_batch_size,\r\n batched=True,\r\n num_proc=data_args.preprocessing_num_workers,\r\n)\r\n\r\n# Later in script\r\nset_caching_enabled(False)\r\n# apply map on trained model to eval\/test sets\r\n\r\n```\r\n\r\n## Expected results\r\nThe cached dataset should always be reloaded.\r\n\r\n## Actual results\r\nThe function is reexecuted.\r\n\r\nI have access to cached files `cache-xxxxx.arrow`.\r\nIs there a way I can somehow load manually 2 versions and see how the hash was created for debug purposes (to know if it's an issue with dataset or function)?\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.8.0-45-generic-x86_64-with-glibc2.29\r\n- Python version: 3.8.5\r\n- PyTorch version (GPU?): 1.8.1+cu102 (True)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: Yes\r\n- Using distributed or parallel set-up in script?: 
No","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2414\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414","id":903877096,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU1MDg5OTIw","number":2414,"title":"Update README.md","user":{"login":"cryoff","id":15029054,"node_id":"MDQ6VXNlcjE1MDI5MDU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15029054?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cryoff","html_url":"https:\/\/github.com\/cryoff","followers_url":"https:\/\/api.github.com\/users\/cryoff\/followers","following_url":"https:\/\/api.github.com\/users\/cryoff\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cryoff\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cryoff\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cryoff\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cryoff\/orgs","repos_url":"https:\/\/api.github.com\/users\/cryoff\/repos","events_url":"https:\/\/api.github.com\/users\/cryoff\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cryoff\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-27T14:53:19Z","updated_at":"2021-06-28T13:46:14Z","closed_at":"2021-06-28T13:04:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2414","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2414.patch"},"body":"Provides description of data instances and dataset features\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2413\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2413","id":903777557,"node_id":"MDU6SXNzdWU5MDM3Nzc1NTc=","number":2413,"title":"AttributeError: 'DatasetInfo' object has no attribute 
'task_templates'","user":{"login":"jungwhank","id":53588015,"node_id":"MDQ6VXNlcjUzNTg4MDE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53588015?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jungwhank","html_url":"https:\/\/github.com\/jungwhank","followers_url":"https:\/\/api.github.com\/users\/jungwhank\/followers","following_url":"https:\/\/api.github.com\/users\/jungwhank\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jungwhank\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jungwhank\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jungwhank\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jungwhank\/orgs","repos_url":"https:\/\/api.github.com\/users\/jungwhank\/repos","events_url":"https:\/\/api.github.com\/users\/jungwhank\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jungwhank\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-27T13:44:28Z","updated_at":"2021-06-01T01:05:47Z","closed_at":"2021-06-01T01:05:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nHello, \r\nI'm trying to add dataset and contribute, but test keep fail with below cli.\r\n` RUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_`\r\n\r\n## Steps to reproduce the bug\r\nIt seems like a bug when I see an error with the existing dataset, not the dataset I'm trying to add.\r\n\r\n` RUN_SLOW=1 pytest tests\/test_dataset_common.py::LocalDatasetTest::test_load_dataset_all_configs_`\r\n\r\n\r\n## Expected results\r\nAll test passed\r\n\r\n## Actual results\r\n```\r\n # check that dataset is not empty\r\n self.parent.assertListEqual(sorted(dataset_builder.info.splits.keys()), sorted(dataset))\r\n for split in dataset_builder.info.splits.keys():\r\n # check that loaded datset is not empty\r\n self.parent.assertTrue(len(dataset[split]) > 0)\r\n \r\n # check that we can cast features for each task template\r\n> task_templates = dataset_builder.info.task_templates\r\nE AttributeError: 'DatasetInfo' object has no attribute 'task_templates'\r\n\r\ntests\/test_dataset_common.py:175: AttributeError\r\n```\r\n\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Darwin-20.4.0-x86_64-i386-64bit\r\n- Python version: 3.7.7\r\n- PyTorch version (GPU?): 1.7.0 (False)\r\n- Tensorflow version (GPU?): 2.3.0 (False)\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2412\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2412","id":903769151,"node_id":"MDU6SXNzdWU5MDM3NjkxNTE=","number":2412,"title":"Docstring mistake: dataset vs. metric","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-27T13:39:11Z","updated_at":"2021-06-01T08:18:04Z","closed_at":"2021-06-01T08:18:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"This:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/d95b95f8cf3cb0cff5f77a675139b584dcfcf719\/src\/datasets\/load.py#L582\r\n\r\nShould better be something like:\r\n\r\n`a metric identifier on HuggingFace AWS bucket (list all available metrics and ids with ``datasets.list_metrics()``)`\r\n\r\nI can provide a PR l8er...","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2411\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411","id":903671778,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0OTAzNjg2","number":2411,"title":"Add DOI badge to 
README","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-27T12:36:47Z","updated_at":"2021-05-27T13:42:54Z","closed_at":"2021-05-27T13:42:54Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2411","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2411.patch"},"body":"Once published the latest release, the DOI badge has been automatically generated by Zenodo.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2410\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410","id":903613676,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0ODUwMjY4","number":2410,"title":"fix #2391 add original answers in 
kilt-TriviaQA","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-27T11:54:29Z","updated_at":"2021-06-15T12:35:57Z","closed_at":"2021-06-14T17:29:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2410","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2410.patch"},"body":"cc @yjernite is it ok like this?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2409\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409","id":903441398,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0Njk3NjA0","number":2409,"title":"Add HF_ prefix to env var 
MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":14,"created_at":"2021-05-27T09:07:00Z","updated_at":"2021-06-08T16:00:55Z","closed_at":"2021-05-27T09:33:41Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2409","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2409.patch"},"body":"As mentioned in https:\/\/github.com\/huggingface\/datasets\/pull\/2399 the env var should be prefixed by HF_","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2408\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408","id":903422648,"node_id":"MDExOlB1bGxSZXF1ZXN0NjU0NjgxMzE4","number":2408,"title":"Fix head_qa 
keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-27T08:50:19Z","updated_at":"2021-05-27T09:05:37Z","closed_at":"2021-05-27T09:05:36Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2408","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2408.patch"},"body":"There were duplicate in the keys, as mentioned in #2382 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2407\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2407","id":903111755,"node_id":"MDU6SXNzdWU5MDMxMTE3NTU=","number":2407,"title":".map() function got an unexpected keyword argument 
'cache_file_name'","user":{"login":"cindyxinyiwang","id":7390482,"node_id":"MDQ6VXNlcjczOTA0ODI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7390482?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cindyxinyiwang","html_url":"https:\/\/github.com\/cindyxinyiwang","followers_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/followers","following_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/orgs","repos_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/repos","events_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cindyxinyiwang\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-27T01:54:26Z","updated_at":"2021-05-27T13:46:40Z","closed_at":"2021-05-27T13:46:40Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI'm trying to save the result of datasets.map() to a specific file, so that I can easily share it among multiple computers without reprocessing the dataset. However, when I try to pass an argument 'cache_file_name' to the .map() function, it throws an error that \".map() function got an unexpected keyword argument 'cache_file_name'\". \r\n\r\nI believe I'm using the latest dataset 1.6.2. 
Also seems like the document and the actual code indicates there is an argument 'cache_file_name' for the .map() function.\r\n\r\nHere is the code I use\r\n## Steps to reproduce the bug\r\n```datasets = load_from_disk(dataset_path=my_path)\r\n\r\n[...]\r\n\r\ndef tokenize_function(examples):\r\n return tokenizer(examples[text_column_name])\r\n\r\nlogger.info(\"Mapping dataset to tokenized dataset.\")\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=preprocessing_num_workers,\r\n remove_columns=column_names,\r\n load_from_cache_file=True,\r\n cache_file_name=\"my_tokenized_file\"\r\n)\r\n```\r\n\r\n## Actual results\r\n tokenized_datasets = datasets.map(\r\nTypeError: map() got an unexpected keyword argument 'cache_file_name'\r\n\r\n## Environment info\r\n\r\n- `datasets` version:1.6.2\r\n- Platform:Linux-4.18.0-193.28.1.el8_2.x86_64-x86_64-with-glibc2.10\r\n- Python version:3.8.5\r\n- PyArrow version:3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2406\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2406","id":902643844,"node_id":"MDU6SXNzdWU5MDI2NDM4NDQ=","number":2406,"title":"Add guide on using task templates to documentation","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-26T16:28:26Z","updated_at":"2021-05-26T16:28:26Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Once we have a stable API on the text classification and question answering task templates, add a guide on how to use them in the documentation.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2405\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405","id":901227658,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUyNzA2OTk1","number":2405,"title":"Add dataset 
tags","user":{"login":"OyvindTafjord","id":6453366,"node_id":"MDQ6VXNlcjY0NTMzNjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6453366?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/OyvindTafjord","html_url":"https:\/\/github.com\/OyvindTafjord","followers_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/followers","following_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/orgs","repos_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/repos","events_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/OyvindTafjord\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-25T18:57:29Z","updated_at":"2021-05-26T16:54:16Z","closed_at":"2021-05-26T16:40:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2405","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2405.patch"},"body":"The dataset tags were provided by Peter Clark following the guide.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2404\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404","id":901179832,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUyNjYzOTcz","number":2404,"title":"Paperswithcode dataset 
mapping","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-25T18:14:26Z","updated_at":"2021-05-26T11:21:25Z","closed_at":"2021-05-26T11:17:18Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2404","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2404.patch"},"body":"This is a continuation of https:\/\/github.com\/huggingface\/huggingface_hub\/pull\/43, encoded directly inside dataset cards.\r\n\r\nAs discussed:\r\n- `paperswithcode_id: null` when the dataset doesn't exist on paperswithcode's side.\r\n- I've added this new key at the end of the yaml instead of ordering all keys alphabetically as pyyaml's default. 
No strong opinion on that one though\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2403\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403","id":900059014,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUxNjcxMTMw","number":2403,"title":"Free datasets with cache file in temp dir on exit","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-24T22:15:11Z","updated_at":"2021-05-26T17:25:19Z","closed_at":"2021-05-26T16:39:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2403","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2403.patch"},"body":"This PR properly cleans up the memory-mapped tables that reference the cache files inside the temp dir.\r\nSince the built-in `_finalizer` of `TemporaryDirectory` can't be modified, this PR defines its own `TemporaryDirectory` class that accepts a custom clean-up function.\r\n\r\nFixes #2402","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2402\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2402","id":900025329,"node_id":"MDU6SXNzdWU5MDAwMjUzMjk=","number":2402,"title":"PermissionError on Windows when using temp dir for 
caching","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-24T21:22:59Z","updated_at":"2021-05-26T16:39:29Z","closed_at":"2021-05-26T16:39:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Currently, the following code raises a PermissionError on master if working on Windows:\r\n\r\n```python\r\n# run as a script or call exit() in REPL to initiate the temp dir cleanup\r\nfrom datasets import *\r\nd = load_dataset(\"sst\", split=\"train\", keep_in_memory=False)\r\nset_caching_enabled(False)\r\nd.map(lambda ex: ex)\r\n```\r\n\r\nError stack trace:\r\n```\r\nTraceback (most recent call last):\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\weakref.py\", line 624, in _exitfunc\r\n f()\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\weakref.py\", line 548, in __call__\r\n return info.func(*info.args, **(info.kwargs or {}))\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\tempfile.py\", line 799, in _cleanup\r\n _shutil.rmtree(name)\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\shutil.py\", line 500, in rmtree\r\n return _rmtree_unsafe(path, onerror)\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\shutil.py\", line 395, in _rmtree_unsafe\r\n onerror(os.unlink, fullname, sys.exc_info())\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\shutil.py\", line 393, in _rmtree_unsafe\r\n os.unlink(fullname)\r\nPermissionError: [WinError 5] Access is denied: 'C:\\\\Users\\\\Mario\\\\AppData\\\\Local\\\\Temp\\\\tmp20epyhmq\\\\cache-87a87ffb5a956e68.arrow'\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2401\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2401","id":899910521,"node_id":"MDU6SXNzdWU4OTk5MTA1MjE=","number":2401,"title":"load_dataset('natural_questions') fails with \"ValueError: External features info don't match the dataset\"","user":{"login":"jonrbates","id":15602718,"node_id":"MDQ6VXNlcjE1NjAyNzE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15602718?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jonrbates","html_url":"https:\/\/github.com\/jonrbates","followers_url":"https:\/\/api.github.com\/users\/jonrbates\/followers","following_url":"https:\/\/api.github.com\/users\/jonrbates\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jonrbates\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jonrbates\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jonrbates\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jonrbates\/orgs","repos_url":"https:\/\/api.github.com\/users\/jonrbates\/repos","events_url":"https:\/\/api.github.com\/users\/jonrbates\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jonrbates\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-05-24T18:38:53Z","updated_at":"2021-06-09T09:07:25Z","closed_at":"2021-06-09T09:07:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nload_dataset('natural_questions') throws ValueError\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ndatasets = load_dataset('natural_questions', split='validation[:10]')\r\n```\r\n\r\n## Expected results\r\nCall to load_dataset returns data.\r\n\r\n## Actual results\r\n```\r\nUsing custom data configuration default\r\nReusing dataset natural_questions (\/mnt\/d\/huggingface\/datasets\/natural_questions\/default\/0.0.2\/19bc04755018a3ad02ee74f7045cde4ba9b4162cb64450a87030ab786b123b76)\r\n---------------------------------------------------------------------------\r\nValueError Traceback (most recent call last)\r\n in \r\n----> 1 datasets = load_dataset('natural_questions', split='validation[:10]', cache_dir='\/mnt\/d\/huggingface\/datasets')\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n 756 keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)\r\n 757 )\r\n--> 758 ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n 759 if 
save_infos:\r\n 760 builder_instance._save_infos()\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in as_dataset(self, split, run_post_process, ignore_verifications, in_memory)\r\n 735 \r\n 736 # Create a dataset for each of the given splits\r\n--> 737 datasets = utils.map_nested(\r\n 738 partial(\r\n 739 self._build_single_dataset,\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py in map_nested(function, data_struct, dict_only, map_list, map_tuple, map_numpy, num_proc, types)\r\n 193 # Singleton\r\n 194 if not isinstance(data_struct, dict) and not isinstance(data_struct, types):\r\n--> 195 return function(data_struct)\r\n 196 \r\n 197 disable_tqdm = bool(logger.getEffectiveLevel() > INFO)\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in _build_single_dataset(self, split, run_post_process, ignore_verifications, in_memory)\r\n 762 \r\n 763 # Build base dataset\r\n--> 764 ds = self._as_dataset(\r\n 765 split=split,\r\n 766 in_memory=in_memory,\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/builder.py in _as_dataset(self, split, in_memory)\r\n 838 in_memory=in_memory,\r\n 839 )\r\n--> 840 return Dataset(**dataset_kwargs)\r\n 841 \r\n 842 def _post_process(self, dataset: Dataset, resources_paths: Dict[str, str]) -> Optional[Dataset]:\r\n\r\n~\/miniconda3\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py in __init__(self, arrow_table, info, split, indices_table, fingerprint)\r\n 271 assert self._fingerprint is not None, \"Fingerprint can't be None in a Dataset object\"\r\n 272 if self.info.features.type != inferred_features.type:\r\n--> 273 raise ValueError(\r\n 274 \"External features info don't match the dataset:\\nGot\\n{}\\nwith type\\n{}\\n\\nbut expected something like\\n{}\\nwith type\\n{}\".format(\r\n 275 self.info.features, self.info.features.type, inferred_features, inferred_features.type\r\n\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'id': Value(dtype='string', id=None), 'document': {'title': Value(dtype='string', id=None), 'url': Value(dtype='string', id=None), 'html': Value(dtype='string', id=None), 'tokens': Sequence(feature={'token': Value(dtype='string', id=None), 'is_html': Value(dtype='bool', id=None)}, length=-1, id=None)}, 'question': {'text': Value(dtype='string', id=None), 'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}, 'annotations': Sequence(feature={'id': Value(dtype='string', id=None), 'long_answer': {'start_token': Value(dtype='int64', id=None), 'end_token': Value(dtype='int64', id=None), 'start_byte': Value(dtype='int64', id=None), 'end_byte': Value(dtype='int64', id=None)}, 'short_answers': Sequence(feature={'start_token': Value(dtype='int64', id=None), 'end_token': Value(dtype='int64', id=None), 'start_byte': Value(dtype='int64', id=None), 'end_byte': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None)}, length=-1, id=None), 'yes_no_answer': ClassLabel(num_classes=2, names=['NO', 'YES'], names_file=None, id=None)}, length=-1, id=None)}\r\nwith type\r\nstruct, long_answer: list>, short_answers: list, end_token: list, start_byte: list, start_token: list, text: list>>, yes_no_answer: list>, document: struct, token: list>>, id: string, question: struct>>\r\n\r\nbut expected something like\r\n{'id': Value(dtype='string', id=None), 'document': {'html': Value(dtype='string', id=None), 'title': Value(dtype='string', id=None), 'tokens': {'is_html': 
Sequence(feature=Value(dtype='bool', id=None), length=-1, id=None), 'token': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}, 'url': Value(dtype='string', id=None)}, 'question': {'text': Value(dtype='string', id=None), 'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}, 'annotations': {'id': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'long_answer': [{'end_byte': Value(dtype='int64', id=None), 'end_token': Value(dtype='int64', id=None), 'start_byte': Value(dtype='int64', id=None), 'start_token': Value(dtype='int64', id=None)}], 'short_answers': [{'end_byte': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'end_token': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'start_byte': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'start_token': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'text': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}], 'yes_no_answer': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None)}}\r\nwith type\r\nstruct, long_answer: list>, short_answers: list, end_token: list, start_byte: list, start_token: list, text: list>>, yes_no_answer: list>, document: struct, token: list>, url: string>, id: string, question: struct>>\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-5.4.72-microsoft-standard-WSL2-x86_64-with-glibc2.10\r\n- Python version: 3.8.3\r\n- PyTorch version (GPU?): 1.6.0 (False)\r\n- Tensorflow version (GPU?): not installed (NA)\r\n- Using GPU in script?: No\r\n- Using distributed or parallel set-up in script?: No\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2400\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2400","id":899867212,"node_id":"MDU6SXNzdWU4OTk4NjcyMTI=","number":2400,"title":"Concatenate several datasets with removed columns is not 
working.","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-24T17:40:15Z","updated_at":"2021-05-25T05:52:01Z","closed_at":"2021-05-25T05:51:59Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nYou can't concatenate datasets when you removed columns before.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset, concatenate_datasets\r\n\r\nwikiann= load_dataset(\"wikiann\",\"en\")\r\n\r\nwikiann[\"train\"] = wikiann[\"train\"].remove_columns([\"langs\",\"spans\"])\r\nwikiann[\"test\"] = wikiann[\"test\"].remove_columns([\"langs\",\"spans\"])\r\n\r\nassert wikiann[\"train\"].features.type == wikiann[\"test\"].features.type\r\n\r\nconcate = concatenate_datasets([wikiann[\"train\"],wikiann[\"test\"]])\r\n```\r\n\r\n## Expected results\r\nMerged dataset \r\n\r\n\r\n## Actual results\r\n```python\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'ner_tags': Sequence(feature=ClassLabel(num_classes=7, names=['O', 'B-PER', 'I-PER', 'B-ORG', 'I-ORG', 'B-LOC', 'I-LOC'], names_file=None, id=None), length=-1, id=None), 'langs': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'spans': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}\r\nwith type\r\nstruct, ner_tags: list, spans: list, tokens: list>\r\n\r\nbut expected something like\r\n{'ner_tags': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None)}\r\nwith type\r\nstruct, tokens: list>\r\n```\r\n## Environment info\r\n\r\n- `datasets` version: ~1.6.2~ 1.5.0\r\n- Platform: macos\r\n- Python version: 3.8.5\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2399\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399","id":899853610,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUxNDk0OTc2","number":2399,"title":"Add env variable for MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-24T17:19:15Z","updated_at":"2021-05-27T09:07:15Z","closed_at":"2021-05-26T16:07:54Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2399","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2399.patch"},"body":"Add env variable for `MAX_IN_MEMORY_DATASET_SIZE_IN_BYTES`.\r\n\r\nThis will allow to turn off default behavior: loading in memory (and not caching) small datasets.\r\n\r\nFix #2387.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2398\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2398","id":899511837,"node_id":"MDU6SXNzdWU4OTk1MTE4Mzc=","number":2398,"title":"News_commentary Dataset Translation Pairs are of Incorrect Language Specified 
Pairs","user":{"login":"anassalamah","id":8571003,"node_id":"MDQ6VXNlcjg1NzEwMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8571003?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anassalamah","html_url":"https:\/\/github.com\/anassalamah","followers_url":"https:\/\/api.github.com\/users\/anassalamah\/followers","following_url":"https:\/\/api.github.com\/users\/anassalamah\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anassalamah\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anassalamah\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anassalamah\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anassalamah\/orgs","repos_url":"https:\/\/api.github.com\/users\/anassalamah\/repos","events_url":"https:\/\/api.github.com\/users\/anassalamah\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anassalamah\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-24T10:03:34Z","updated_at":"2021-05-24T10:03:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I used load_dataset to load the news_commentary dataset for \"ar-en\" translation pairs but found translations from Arabic to Hindi. \r\n\r\n```\r\ntrain_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[:98%]')\r\nval_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[98%:]')\r\n\r\n# filtering out examples that are not ar-en translations but ar-hi\r\nval_ds = val_ds.filter(lambda example, indice: indice not in chain(range(1312,1327) ,range(1384,1399), range(1030,1042)), with_indices=True)\r\n```\r\n\r\n* I'm fairly new to using datasets so I might be doing something wrong","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2397\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397","id":899427378,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUxMTMxMTY0","number":2397,"title":"Fix number of classes in indic_glue sna.bn 
dataset","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-24T08:18:55Z","updated_at":"2021-05-25T16:32:16Z","closed_at":"2021-05-25T16:32:16Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2397","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2397.patch"},"body":"As read in the [paper](https:\/\/www.aclweb.org\/anthology\/2020.findings-emnlp.445.pdf), Table 11.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2396\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2396","id":899016308,"node_id":"MDU6SXNzdWU4OTkwMTYzMDg=","number":2396,"title":"strange datasets from OSCAR 
corpus","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-23T13:06:02Z","updated_at":"2021-06-17T13:54:37Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/50871412\/119260850-4f876b80-bc07-11eb-8894-124302600643.png)\r\n![image](https:\/\/user-images.githubusercontent.com\/50871412\/119260875-675eef80-bc07-11eb-9da4-ee27567054ac.png)\r\nFrom the [official site ](https:\/\/oscar-corpus.com\/), the Yue Chinese dataset should have 2.2KB data.\r\n7 training instances is obviously not a right number.\r\nAs I can read Yue Chinese, I call tell the last instance is definitely not something that would appear on Common Crawl.\r\nAnd even if you don't read Yue Chinese, you can tell the first six instance are problematic.\r\n(It is embarrassing, as the 7 training instances look exactly like something from a pornographic novel or flitting messages in a chat of a dating app)\r\nIt might not be the problem of the huggingface\/datasets implementation, because when I tried to download the dataset from the official site, I found out that the zip file is corrupted.\r\nI will try to inform the host of OSCAR corpus later.\r\nAwy a remake about this dataset in huggingface\/datasets is needed, perhaps after the host of the dataset fixes the issue.\r\n\r\n> Hi @jerryIsHere , sorry for the late response! Sadly this is normal, the problem comes form fasttext's classifier which we used to create the original corpus. In general the classifier is not really capable of properly recognizing Yue Chineese so the file ends un being just noise from Common Crawl. Some of these problems with OSCAR were already discussed [here](https:\/\/arxiv.org\/pdf\/2103.12028.pdf) but we are working on explicitly documenting the problems by language on our website. 
In fact, could please you open an issue on [our repo](https:\/\/github.com\/oscar-corpus\/oscar-website\/issues) as well so that we can track it?\r\n\r\nThanks a lot, the new post is here:\r\nhttps:\/\/github.com\/oscar-corpus\/oscar-website\/issues\/11","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2395\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395","id":898762730,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUwNTk3NjI0","number":2395,"title":"`pretty_name` for dataset in YAML tags","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":18,"created_at":"2021-05-22T09:24:45Z","updated_at":"2021-06-24T14:14:11Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2395","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2395.patch"},"body":"I'm updating `pretty_name` for datasets in YAML tags as discussed with @lhoestq. Here are the first 10, please let me know if they're looking good.\r\n\r\nIf dataset has 1 config, I've added `pretty_name` as `config_name: full_name_of_dataset` as config names were `plain_text`, `default`, `squad` etc (not so important in this case) whereas when dataset has >1 configs, I've added `config_name: full_name_of_dataset+config_name` so as to let user know about the `config` here. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2392\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392","id":898156795,"node_id":"MDExOlB1bGxSZXF1ZXN0NjUwMDYxOTE3","number":2392,"title":"Update text classification template labels in DatasetInfo __post_init__","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-21T15:29:41Z","updated_at":"2021-05-28T11:37:35Z","closed_at":"2021-05-28T11:37:32Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2392","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2392.patch"},"body":"This PR implements the idea discussed in #2389 to update the `labels` of the `TextClassification` template in the `DatasetInfo.__post_init__`. 
The main reason for doing so is so avoid duplicating the label definitions in both `DatasetInfo.features` and `DatasetInfo.task_templates`.\r\n\r\nTo avoid storing state in `DatasetInfo.__post_init__`, the current implementation flushes `DatasetInfo.task_templates` before the features are cast in `Dataset.prepare_for_task` (thanks to @mariosasko for this idea!).\r\n\r\nHere is an example of the current workflow:\r\n\r\n```python\r\nds1 = load_dataset(\".\/datasets\/emotion\/\")\r\n# cast features and flush templates\r\nds2 = ds1.prepare_for_task(\"text-classification\")\r\nassert ds2.info.task_templates is None\r\n```\r\n\r\nNote that if users want to pass a `TextClassification` template to `prepare_for_task`, we require them to set `TextClassification.labels` to match the dataset's features corresponding to `label_column`:\r\n\r\n```python\r\nds1 = load_dataset(\".\/datasets\/emotion\/\")\r\n# TextClassification.labels is None by default => invalid template\r\ntask = TextClassification(text_column=\"text\", label_column=\"label\")\r\n# Raises ValueError\r\nds1.prepare_for_task(task)\r\n# Specifying the labels => valid template\r\ntask = TextClassification(text_column=\"text\", label_column=\"label\", labels=['anger', 'fear', 'joy', 'love', 'sadness', 'surprise'])\r\nds1.prepare_for_task(task)\r\n```\r\n\r\nThis PR also adds:\r\n\r\n* New tests + fixed some old tests that weren't testing `assertRaises` properly\r\n* A decorator to share docstrings across common functions. This allows us to document `DatasetDict.prepare_for_task` and `Dataset.prepare_for_task` in one place.\r\n* Fixes to avoid side-effects from in-place replacements of `DatasetInfo.task_templates` in `DatasetInfo.__post_init__`. Thanks to @lhoestq for figuring this out!\r\n* Removal of `FeaturesWithLazyClassLabel` since we now create a new instance of `TextClassification` in `DatasetInfo.__post_init__` and avoid the side-effects first pointed out by @mariosasko \r\n\r\n### PR Description from original WIP \r\n\r\nHi @yjernite and @lhoestq, here's a first stab at the suggestion discussed in #2389 to update the `labels` of the `TextClassification` template in the `DatasetInfo.__post_init__`.\r\n\r\nOne problem I've spotted is that my current implementation introduces state into the `__post_init__`: \r\n\r\n* When we call `load_dataset`, `DatasetInfo.features` are the \"raw\" features without any casting so we can access the column names by the `label_column` specified in `TextClassification`\r\n* When we call `Dataset.prepare_for_task` we run into a problem because the `DatasetInfo.features` are first cast into the new schema which triggers a `KeyError` when we update the infos [here](https:\/\/github.com\/huggingface\/datasets\/blob\/8b2a78520828e0cc13c14a31f413a5395ef25110\/src\/datasets\/arrow_dataset.py#L1959).\r\n\r\nHere's an explicit example of what I mean with the stack trace appended below:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# this works \r\nds = load_dataset(\"emotion\")\r\n# we can verify the task template is correctly set\r\nds[\"train\"].info.task_templates # returns [TextClassification(labels=('sadness', 'joy', 'love', 'anger', 'fear', 'surprise'), text_column='text', label_column='label')]\r\n# but this fails because the _post_init__ is looking for the original column names\r\nds.prepare_for_task(\"text-classification\")\r\n```\r\n```\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in \r\n----> 1 
ds.prepare_for_task(\"text-classification\")\r\n\r\n~\/git\/datasets\/src\/datasets\/dataset_dict.py in prepare_for_task(self, task)\r\n 807 \"\"\"\r\n 808 self._check_values_type()\r\n--> 809 return DatasetDict({k: dataset.prepare_for_task(task=task) for k, dataset in self.items()})\r\n\r\n~\/git\/datasets\/src\/datasets\/dataset_dict.py in (.0)\r\n 807 \"\"\"\r\n 808 self._check_values_type()\r\n--> 809 return DatasetDict({k: dataset.prepare_for_task(task=task) for k, dataset in self.items()})\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in prepare_for_task(self, task)\r\n 1421 dataset = self.remove_columns(columns_to_drop)\r\n 1422 dataset = dataset.rename_columns(column_mapping)\r\n-> 1423 dataset = dataset.cast(features=template.features)\r\n 1424 return dataset\r\n 1425 \r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in cast(self, features, batch_size, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, num_proc)\r\n 970 format = self.format\r\n 971 dataset = self.with_format(\"arrow\")\r\n--> 972 dataset = dataset.map(\r\n 973 lambda t: t.cast(schema),\r\n 974 batched=True,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1583 \r\n 1584 if num_proc is None or num_proc == 1:\r\n-> 1585 return self._map_single(\r\n 1586 function=function,\r\n 1587 with_indices=with_indices,\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in wrapper(*args, **kwargs)\r\n 173 }\r\n 174 # apply actual function\r\n--> 175 out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n 176 datasets: List[\"Dataset\"] = list(out.values()) if isinstance(out, dict) else [out]\r\n 177 # re-apply format to the output\r\n\r\n~\/git\/datasets\/src\/datasets\/fingerprint.py in wrapper(*args, **kwargs)\r\n 338 # Call actual function\r\n 339 \r\n--> 340 out = func(self, *args, **kwargs)\r\n 341 \r\n 342 # Update fingerprint of in-place transforms + update in-place history of transforms\r\n\r\n~\/git\/datasets\/src\/datasets\/arrow_dataset.py in _map_single(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, new_fingerprint, rank, offset)\r\n 1959 if update_data:\r\n 1960 # Create new Dataset from buffer or file\r\n-> 1961 info = self.info.copy()\r\n 1962 info.features = writer._features\r\n 1963 if buf_writer is None:\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in copy(self)\r\n 274 \r\n 275 def copy(self) -> \"DatasetInfo\":\r\n--> 276 return self.__class__(**{k: copy.deepcopy(v) for k, v in self.__dict__.items()})\r\n 277 \r\n 278 \r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __init__(self, description, citation, homepage, license, features, post_processed, supervised_keys, task_templates, builder_name, config_name, version, splits, download_checksums, download_size, post_processing_size, dataset_size, size_in_bytes)\r\n\r\n~\/git\/datasets\/src\/datasets\/info.py in __post_init__(self)\r\n 174 # The reason is that Dataset.prepare_for_task calls Dataset.cast which converts the\r\n 175 # DatasetInfo.features to the new schema and thus template.label_column is no longer a valid key\r\n--> 176 
object.__setattr__(template, \"labels\", tuple(self.features[template.label_column].names))\r\n 177 template.label_schema[\"labels\"] = ClassLabel(names=template.labels)\r\n 178 self.task_templates[idx] = template\r\n\r\nKeyError: 'label'\r\n```\r\n\r\nWhat do you think? I did this a bit quickly, so maybe I'm overlooking something obvious :) One thing would be to only update the labels of the task template on load, but this seems a bit hacky IMO","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2391\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2391","id":898128099,"node_id":"MDU6SXNzdWU4OTgxMjgwOTk=","number":2391,"title":"Missing original answers in kilt-TriviaQA","user":{"login":"PaulLerner","id":25532159,"node_id":"MDQ6VXNlcjI1NTMyMTU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25532159?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PaulLerner","html_url":"https:\/\/github.com\/PaulLerner","followers_url":"https:\/\/api.github.com\/users\/PaulLerner\/followers","following_url":"https:\/\/api.github.com\/users\/PaulLerner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PaulLerner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PaulLerner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PaulLerner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PaulLerner\/orgs","repos_url":"https:\/\/api.github.com\/users\/PaulLerner\/repos","events_url":"https:\/\/api.github.com\/users\/PaulLerner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PaulLerner\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-21T14:57:07Z","updated_at":"2021-06-14T17:29:11Z","closed_at":"2021-06-14T17:29:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I previously opened an issue at https:\/\/github.com\/facebookresearch\/KILT\/issues\/42 but from the answer of @fabiopetroni it seems that the problem comes from HF-datasets\r\n\r\n## Describe the bug\r\nThe `answer` field in kilt-TriviaQA, e.g. `kilt_tasks['train_triviaqa'][0]['output']['answer']` contains a list of alternative answer which are accepted for the question. \r\nHowever it'd be nice to know the original answer to the question (the only fields in `output` are `'answer', 'meta', 'provenance'`)\r\n\r\n## How to fix\r\nIt can be fixed by retrieving the original answer from the original TriviaQA (e.g. 
`trivia_qa['train'][0]['answer']['value']`), perhaps at the same place as here where one retrieves the questions https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/kilt_tasks\/README.md#loading-the-kilt-knowledge-source-and-task-data\r\n\r\ncc @yjernite who previously answered to an issue about KILT and TriviaQA :)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2390\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390","id":897903642,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ5ODQ0NjQ2","number":2390,"title":"Add check for task templates on dataset load","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-21T10:16:57Z","updated_at":"2021-05-21T15:49:09Z","closed_at":"2021-05-21T15:49:06Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2390","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2390.patch"},"body":"This PR adds a check that the features of a dataset match the schema of each compatible task template.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2389\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389","id":897822270,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ5Nzc3MDMz","number":2389,"title":"Insert task templates for text 
classification","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-21T08:36:26Z","updated_at":"2021-05-28T15:28:58Z","closed_at":"2021-05-28T15:26:28Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2389","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2389.patch"},"body":"This PR inserts text-classification templates for datasets with the following properties:\r\n\r\n* Only one config\r\n* At most two features of `(Value, ClassLabel)` type\r\n\r\nNote that this misses datasets like `sentiment140` which only has `Value` type features - these will be handled in a separate PR","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2388\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2388","id":897767470,"node_id":"MDU6SXNzdWU4OTc3Njc0NzA=","number":2388,"title":"Incorrect URLs for some 
datasets","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-21T07:22:35Z","updated_at":"2021-06-04T17:39:45Z","closed_at":"2021-06-04T17:39:45Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nIt seems that the URLs for the following datasets are invalid: \r\n\r\n- [ ] `bn_hate_speech` has been renamed: 
https:\/\/github.com\/rezacsedu\/Bengali-Hate-Speech-Dataset\/commit\/c67ecfc4184911e12814f6b36901f9828df8a63a\r\n- [ ] `covid_tweets_japanese` has been renamed: http:\/\/www.db.info.gifu-u.ac.jp\/covid-19-twitter-dataset\/\r\n\r\nAs a result we can no longer load these datasets using `load_dataset`. The simple fix is to rename the URL in the dataset script - will do this asap.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n# pick one of the datasets from the list above\r\nds = load_dataset(\"bn_hate_speech\")\r\n```\r\n\r\n## Expected results\r\nDataset loads without error.\r\n\r\n## Actual results\r\n```\r\nDownloading: 3.36kB [00:00, 1.07MB\/s] \r\nDownloading: 2.03kB [00:00, 678kB\/s] \r\nUsing custom data configuration default\r\nDownloading and preparing dataset bn_hate_speech\/default (download: 951.48 KiB, generated: 949.84 KiB, post-processed: Unknown size, total: 1.86 MiB) to \/Users\/lewtun\/.cache\/huggingface\/datasets\/bn_hate_speech\/default\/0.0.0\/a2dc726e511a2177523301bcad196af05d4d8a2cff30d2769ba8aacc1f5fdb5c...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 744, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 574, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 630, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/Users\/lewtun\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/bn_hate_speech\/a2dc726e511a2177523301bcad196af05d4d8a2cff30d2769ba8aacc1f5fdb5c\/bn_hate_speech.py\", line 76, in _split_generators\r\n train_path = dl_manager.download_and_extract(_URL)\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 287, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 195, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 195, in map_nested\r\n return function(data_struct)\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 218, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 281, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/Users\/lewtun\/miniconda3\/envs\/hf-hub_eval\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/rezacsedu\/Bengali-Hate-Speech-Dataset\/main\/Bengali_%20Hate_Speech_Dataset_Subset.csv\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.2.dev0\r\n- Platform: macOS-10.16-x86_64-i386-64bit\r\n- Python version: 
3.8.8\r\n- PyArrow version: 3.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2387\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2387","id":897566666,"node_id":"MDU6SXNzdWU4OTc1NjY2NjY=","number":2387,"title":"datasets 1.6 ignores cache","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":13,"created_at":"2021-05-21T00:12:58Z","updated_at":"2021-05-26T16:07:54Z","closed_at":"2021-05-26T16:07:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Moving from https:\/\/github.com\/huggingface\/transformers\/issues\/11801#issuecomment-845546612 \r\n\r\nQuoting @VictorSanh:\r\n\r\n> \r\n> I downgraded datasets to `1.5.0` and printed `tokenized_datasets.cache_files` (L335):\r\n> \r\n> > `{'train': [{'filename': '\/home\/victor\/.cache\/huggingface\/datasets\/openwebtext10k\/plain_text\/1.0.0\/3a8df094c671b4cb63ed0b41f40fb3bd855e9ce2e3765e5df50abcdfb5ec144b\/cache-c6aefe81ca4e5152.arrow'}], 'validation': [{'filename': '\/home\/victor\/.cache\/huggingface\/datasets\/openwebtext10k\/plain_text\/1.0.0\/3a8df094c671b4cb63ed0b41f40fb3bd855e9ce2e3765e5df50abcdfb5ec144b\/cache-97cf4c813e6469c6.arrow'}]}`\r\n> \r\n> while the same command with the latest version of datasets (actually starting at `1.6.0`) gives:\r\n> > `{'train': [], 'validation': []}`\r\n> \r\n\r\nI also confirm that downgrading to `datasets==1.5.0` makes things fast again - i.e. 
cache is used.\r\n\r\nto reproduce:\r\n```\r\nUSE_TF=0 python examples\/pytorch\/language-modeling\/run_clm.py \\\r\n --model_name_or_path gpt2 \\\r\n --dataset_name \"stas\/openwebtext-10k\" \\\r\n --output_dir output_dir \\\r\n --overwrite_output_dir \\\r\n --do_train \\\r\n --do_eval \\\r\n --max_train_samples 1000 \\\r\n --max_eval_samples 200 \\\r\n --per_device_train_batch_size 4 \\\r\n --per_device_eval_batch_size 4 \\\r\n --num_train_epochs 1 \\\r\n --warmup_steps 8 \\\r\n --block_size 64 \\\r\n --fp16 \\\r\n --report_to none\r\n```\r\n\r\nthe first time the startup is slow and some 5 tqdm bars. It shouldn't do it on consequent runs. but with `datasets>1.5.0` it rebuilds on every run.\r\n\r\n@lhoestq \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2386\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2386","id":897560049,"node_id":"MDU6SXNzdWU4OTc1NjAwNDk=","number":2386,"title":"Accessing Arrow dataset cache_files","user":{"login":"Mehrad0711","id":28717374,"node_id":"MDQ6VXNlcjI4NzE3Mzc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/28717374?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Mehrad0711","html_url":"https:\/\/github.com\/Mehrad0711","followers_url":"https:\/\/api.github.com\/users\/Mehrad0711\/followers","following_url":"https:\/\/api.github.com\/users\/Mehrad0711\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Mehrad0711\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Mehrad0711\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Mehrad0711\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Mehrad0711\/orgs","repos_url":"https:\/\/api.github.com\/users\/Mehrad0711\/repos","events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Mehrad0711\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-20T23:57:43Z","updated_at":"2021-05-21T19:18:03Z","closed_at":"2021-05-21T19:18:03Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nIn datasets 1.5.0 the following code snippet would have printed the cache_files:\r\n\r\n```\r\ntrain_data = load_dataset('conll2003', split='train', cache_dir='data')\r\nprint(train_data.cache_files[0]['filename'])\r\n\r\n```\r\n\r\nHowever, in the newest release (1.6.1), it prints an empty list.\r\n\r\nI also tried loading the dataset with `keep_in_memory=True` argument but still `cache_files` is empty.\r\n\r\nWas wondering if this is a bug or I need to pass additional arguments so I can access the cache_files.\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2385\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385","id":897206823,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ5MjM1Mjcy","number":2385,"title":"update citations","user":{"login":"adeepH","id":46108405,"node_id":"MDQ6VXNlcjQ2MTA4NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46108405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adeepH","html_url":"https:\/\/github.com\/adeepH","followers_url":"https:\/\/api.github.com\/users\/adeepH\/followers","following_url":"https:\/\/api.github.com\/users\/adeepH\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adeepH\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adeepH\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adeepH\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adeepH\/orgs","repos_url":"https:\/\/api.github.com\/users\/adeepH\/repos","events_url":"https:\/\/api.github.com\/users\/adeepH\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adeepH\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-20T17:54:08Z","updated_at":"2021-05-21T12:38:18Z","closed_at":"2021-05-21T12:38:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2385","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2385.patch"},"body":"To update citations for [Offenseval_dravidiain](https:\/\/huggingface.co\/datasets\/offenseval_dravidian)\r\n ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2384\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384","id":896866461,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ4OTI4NTQ0","number":2384,"title":"Add args description to 
DatasetInfo","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-20T13:53:10Z","updated_at":"2021-05-22T09:26:16Z","closed_at":"2021-05-22T09:26:14Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2384","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2384.patch"},"body":"Closes #2354 \r\n\r\nI am not sure what `post_processed` and `post_processing_size` correspond to, so have left them empty for now. I also took a guess at some of the other fields like `dataset_size` vs `size_in_bytes`, so might have misunderstood their meaning.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2383\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383","id":895779723,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3OTU4MTQ0","number":2383,"title":"Improve example in rounding 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T18:59:23Z","updated_at":"2021-05-21T12:53:22Z","closed_at":"2021-05-21T12:36:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2383","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2383.patch"},"body":"Improves the example in the rounding subsection of the Split API docs. With this change, it should more clear what's the difference between the `closest` and the `pct1_dropremainder` rounding.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2382\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2382","id":895610216,"node_id":"MDU6SXNzdWU4OTU2MTAyMTY=","number":2382,"title":"DuplicatedKeysError: FAILURE TO GENERATE DATASET ! 
load_dataset('head_qa', 'en')","user":{"login":"helloworld123-lab","id":75953751,"node_id":"MDQ6VXNlcjc1OTUzNzUx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/75953751?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/helloworld123-lab","html_url":"https:\/\/github.com\/helloworld123-lab","followers_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/followers","following_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/orgs","repos_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/repos","events_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/helloworld123-lab\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T15:49:48Z","updated_at":"2021-05-30T13:26:16Z","closed_at":"2021-05-30T13:26:16Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello everyone,\r\n\r\nI try to use head_qa dataset in [https:\/\/huggingface.co\/datasets\/viewer\/?dataset=head_qa&config=en](url)\r\n\r\n```\r\n!pip install datasets\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\r\n 'head_qa', 'en')\r\n```\r\nWhen I write above load_dataset(.), it throws the following:\r\n\r\n```\r\nDuplicatedKeysError Traceback (most recent call last)\r\n\r\n in ()\r\n 2 from datasets import load_dataset\r\n 3 dataset = load_dataset(\r\n----> 4 'head_qa', 'en')\r\n\r\n5 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_writer.py in check_duplicate_keys(self)\r\n 347 for hash, key in self.hkey_record:\r\n 348 if hash in tmp_record:\r\n--> 349 raise DuplicatedKeysError(key)\r\n 350 else:\r\n 351 tmp_record.add(hash)\r\n\r\nDuplicatedKeysError: FAILURE TO GENERATE DATASET !\r\nFound duplicate Key: 1\r\nKeys should be unique and deterministic in nature\r\n```\r\nHow can I fix the error? 
Thanks\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2381\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381","id":895588844,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3NzkyNDcw","number":2381,"title":"add dataset card title","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T15:30:03Z","updated_at":"2021-05-20T18:51:40Z","closed_at":"2021-05-20T18:51:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2381","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2381.patch"},"body":"few of them were missed by me earlier which I've added now","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2380\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380","id":895367201,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3NTk3NTc3","number":2380,"title":"maintain YAML structure reading from 
README","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-19T12:12:07Z","updated_at":"2021-05-19T13:08:38Z","closed_at":"2021-05-19T13:08:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2380","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2380.patch"},"body":"How YAML used be loaded earlier in the string (structure of YAML was affected because of this and YAML for datasets with multiple configs was not being loaded correctly):\r\n```\r\nannotations_creators:\r\nlabeled_final:\r\n- expert-generated\r\nlabeled_swap:\r\n- expert-generated\r\nunlabeled_final:\r\n- machine-generated\r\nlanguage_creators:\r\n- machine-generated\r\nlanguages:\r\n- en\r\nlicenses:\r\n- other\r\nmultilinguality:\r\n- monolingual\r\nsize_categories:\r\nlabeled_final:\r\n- 10K\r\n- `datasets` version: datasets-1.6.2\r\n- Platform: Linux\r\n- Python version: 3.7\r\n- PyArrow version: 0.17.1, also 2.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2376\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376","id":894852264,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ3MTU1NDE4","number":2376,"title":"Improve task api code 
quality","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-18T23:13:40Z","updated_at":"2021-06-02T20:39:57Z","closed_at":"2021-05-25T15:30:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2376","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2376.patch"},"body":"Improves the code quality of the `TaskTemplate` dataclasses.\r\n\r\nChanges:\r\n* replaces `return NotImplemented` with raise `NotImplementedError` \r\n* replaces `sorted` with `len` in the uniqueness check \r\n* defines `label2id` and `id2label` in the `TextClassification` template as properties\r\n* replaces the `object.__setattr__(self, attr, value)` syntax with (IMO nicer) `self.__dict__[attr] = value`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2375\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375","id":894655157,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2OTg2NTcw","number":2375,"title":"Dataset 
Streaming","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-18T18:20:00Z","updated_at":"2021-06-23T16:35:02Z","closed_at":"2021-06-23T16:35:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2375","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2375.patch"},"body":"# Dataset Streaming\r\n\r\n## API\r\n\r\nCurrent API is\r\n\r\n```python\r\nfrom datasets import load_dataset\r\n\r\n# Load an IterableDataset without downloading data\r\nsnli = load_dataset(\"snli\", streaming=True)\r\n\r\n# Access examples by streaming data\r\nprint(next(iter(snli[\"train\"]))) \r\n# {'premise': 'A person on a horse jumps over a broken down airplane.',\r\n# 'hypothesis': 'A person is training his horse for a competition.',\r\n# 'label': 1}\r\n```\r\n\r\nI already implemented a few methods:\r\n- IterableDataset.map: apply transforms on-the-fly to the examples\r\n- IterableDataset.shuffle: shuffle the data _a la_ TFDS, i.e. with a shuffling buffer\r\n- IterableDataset.with_format: set the format to `\"torch\"` to get a `torch.utils.data.IterableDataset`\r\n- merge_datasets: merge two iterable datasets by alternating one or the other (you can specify the probabilities)\r\n\r\nI would love to have your opinion on the API design :)\r\n\r\n## Implementation details\r\n\r\n### Streaming\r\n\r\nData streaming is done using `fsspec` which has nice caching features.\r\n\r\nTo make dataset streaming work I extend the `open` function of dataset scripts to support opening remote files without downloading them entirely. It also works with remote compressed archives (currently only zip is supported):\r\n\r\n```python\r\n# Get a file-like object by streaming data from a remote file\r\nopen(\"https:\/\/github.com\/davidsbatista\/NER-datasets\/raw\/master\/CONLL2003\/train.txt\")\r\n\r\n# Get a file-like object by streaming data from a remote compressed archive by using the hop separator \"::\"\r\nopen(\"zip:\/\/snli_1.0_train.txt::https:\/\/nlp.stanford.edu\/projects\/snli\/snli_1.0.zip\")\r\n```\r\n\r\nI also extend the `os.path.join` function to support navigation in remote compressed archives, since it has to deal with the `\"::\"` separator. 
This separator is used by `fsspec`.\r\n\r\nFinally I also added a retry mechanism in case the connection fails during data streaming.\r\n\r\n### Transforms\r\n\r\nAn IterableDataset wraps an ExamplesIterable instance. There are different subclasses depending on the transforms we want to apply:\r\n- ExamplesIterable: the basic one\r\n- MappedExamplesIterable: an iterable with a `map` function applied on the fly\r\n- BufferShuffledExamplesIterable: an iterable with a shuffling buffer\r\n- CyclingMultiSourcesExamplesIterable: alternates between several ExamplesIterable\r\n- RandomlyCyclingMultiSourcesExamplesIterable: randomly alternates between several ExamplesIterable\r\n\r\n### DatasetBuilder\r\n\r\nI use the same builders as usual. I just added a new method `_get_examples_iterable_for_split` to get an ExamplesIterable for a given split. Currently only the GeneratorBasedBuilder and the ArrowBasedBuilder implement it.\r\n\r\nThe BeamBasedBuilder doesn't implement it yet.\r\nIt means that datasets like wikipedia and natural_questions can't be loaded as IterableDataset for now.\r\n\r\n## Other details\r\n\r\nI may have to do some changes in many dataset script to use `download` instead of `download_and_extract` when extraction is not needed. This will avoid errors for streaming.<\/s>\r\n\r\nEDIT: Actually I just check for the extension of the file to do extraction only if needed.\r\n\r\nEDIT2: It's not possible to stream from .tar.gz files without downloading the file completely. For now I raise an error if one want to get a streaming dataset based on .tar.gz files.\r\n\r\n## TODO\r\n\r\nusual stuff:\r\n\r\n- [x] make streaming dependency \"aiohttp\" optional: `pip install datasets[streaming]`\r\n- [x] tests\r\n- [x] docs","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2374\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374","id":894579364,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2OTIyMjkw","number":2374,"title":"add `desc` to `tqdm` in 
`Dataset.map()`","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-18T16:44:29Z","updated_at":"2021-05-27T15:44:04Z","closed_at":"2021-05-26T14:59:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2374","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2374.patch"},"body":"Fixes #2330. Please let me know if anything is also required in this ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2373\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2373","id":894499909,"node_id":"MDU6SXNzdWU4OTQ0OTk5MDk=","number":2373,"title":"Loading dataset from local 
path","user":{"login":"kolakows","id":34172905,"node_id":"MDQ6VXNlcjM0MTcyOTA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/34172905?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kolakows","html_url":"https:\/\/github.com\/kolakows","followers_url":"https:\/\/api.github.com\/users\/kolakows\/followers","following_url":"https:\/\/api.github.com\/users\/kolakows\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kolakows\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kolakows\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kolakows\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kolakows\/orgs","repos_url":"https:\/\/api.github.com\/users\/kolakows\/repos","events_url":"https:\/\/api.github.com\/users\/kolakows\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kolakows\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-18T15:20:50Z","updated_at":"2021-05-18T15:36:36Z","closed_at":"2021-05-18T15:36:35Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I'm trying to load a local dataset with the code below\r\n\r\n```\r\nds = datasets.load_dataset('my_script.py', \r\n data_files='corpus.txt', \r\n data_dir='\/data\/dir', \r\n cache_dir='.')\r\n```\r\nBut internally a BuilderConfig is created, which tries to use getmtime on the data_files string, without using data_dir. Is this a bug or am I not using the load_dataset correctly?\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/bc61954083f74e6460688202e9f77dde2475319c\/src\/datasets\/builder.py#L153","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2372\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372","id":894496064,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2ODUxODc2","number":2372,"title":"ConvQuestions benchmark 
added","user":{"login":"PhilippChr","id":24608689,"node_id":"MDQ6VXNlcjI0NjA4Njg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24608689?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilippChr","html_url":"https:\/\/github.com\/PhilippChr","followers_url":"https:\/\/api.github.com\/users\/PhilippChr\/followers","following_url":"https:\/\/api.github.com\/users\/PhilippChr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilippChr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilippChr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilippChr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilippChr\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilippChr\/repos","events_url":"https:\/\/api.github.com\/users\/PhilippChr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilippChr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-18T15:16:50Z","updated_at":"2021-05-26T10:31:45Z","closed_at":"2021-05-26T10:31:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2372","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2372.patch"},"body":"Hello,\r\nI would like to integrate our dataset on conversational QA. The answers are grounded in the KG.\r\nThe work was published in CIKM 2019 (https:\/\/dl.acm.org\/doi\/10.1145\/3357384.3358016).\r\nWe hope for further research on how to deal with the challenges of factoid conversational QA.\r\nThanks! 
:)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2371\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2371","id":894193403,"node_id":"MDU6SXNzdWU4OTQxOTM0MDM=","number":2371,"title":"Align question answering tasks with sub-domains","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-18T09:47:59Z","updated_at":"2021-05-18T09:49:22Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As pointed out by @thomwolf in #2255 we should consider breaking with the pipeline taxonomy of `transformers` to account for the various types of question-answering domains:\r\n\r\n> `question-answering` exists in two forms: abstractive and extractive question answering.\r\n> \r\n> we can keep a generic `question-answering` but then it will probably mean diferrent schema of input\/output for both (abstractive will have text for both while extractive can use spans indication as well as text).\r\n> \r\n> Or we can also propose to use `abstractive-question-answering` and `extractive-question-answering` for instance.\r\n> Maybe we could have `question-answering-abstractive` and `question-answering-extractive` if somehow we can use a for a completion or search in the future (detail).\r\n> Actually I see that people are more organizing in terms of general and sub-tasks, for instance on paperwithcode: https:\/\/paperswithcode.com\/area\/natural-language-processing and on nlpprogress: https:\/\/github.com\/sebastianruder\/NLP-progress\/blob\/master\/english\/question_answering.md#squad\r\n> \r\n> Probably the best is to align with one of these in terms of denomination, PaperWithCode is probably the most active and maintained and we work with them as well.\r\n> Maybe you want to check with a few QA datasets that this schema make sense. 
Typically NaturalQuestions, TriviaQA and can be good second datasets to compare to and be sure of the generality of the schema.\r\n> \r\n> A good recent list of QA datasets to compare the schemas among, is for instance in the UnitedQA paper: https:\/\/arxiv.org\/abs\/2101.00178\r\n\r\nInvestigate which grouping of QA is best suited for `datasets` and adapt \/ extend the QA task template accordingly.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2370\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370","id":893606432,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2MDkyNDQy","number":2370,"title":"Adding HendrycksTest dataset","user":{"login":"andyzoujm","id":43451571,"node_id":"MDQ6VXNlcjQzNDUxNTcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/43451571?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/andyzoujm","html_url":"https:\/\/github.com\/andyzoujm","followers_url":"https:\/\/api.github.com\/users\/andyzoujm\/followers","following_url":"https:\/\/api.github.com\/users\/andyzoujm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/andyzoujm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/andyzoujm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/andyzoujm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/andyzoujm\/orgs","repos_url":"https:\/\/api.github.com\/users\/andyzoujm\/repos","events_url":"https:\/\/api.github.com\/users\/andyzoujm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/andyzoujm\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-17T18:53:05Z","updated_at":"2021-05-31T16:37:13Z","closed_at":"2021-05-31T16:37:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2370","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2370.patch"},"body":"Adding Hendrycks test from https:\/\/arxiv.org\/abs\/2009.03300.\r\nI'm having a bit of trouble with dummy data creation because some lines in the csv files aren't being loaded properly (only the first entry loaded in a row of length 6). The dataset is loading just fine. 
Hope you can kindly help!\r\nThank you!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2369\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369","id":893554153,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ2MDQ5NDM1","number":2369,"title":"correct labels of conll2003","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-17T17:37:54Z","updated_at":"2021-05-18T08:27:42Z","closed_at":"2021-05-18T08:27:42Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2369","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2369.patch"},"body":"# What does this PR\r\n\r\nIt fixes\/extends the `ner_tags` for conll2003 to include all. 
\r\nPaper reference https:\/\/arxiv.org\/pdf\/cs\/0306050v1.pdf\r\nModel reference https:\/\/huggingface.co\/elastic\/distilbert-base-cased-finetuned-conll03-english\/blob\/main\/config.json \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2368\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368","id":893411076,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ1OTI5NzM0","number":2368,"title":"Allow \"other-X\" in licenses","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-17T14:47:54Z","updated_at":"2021-05-17T16:36:27Z","closed_at":"2021-05-17T16:36:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2368","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2368.patch"},"body":"This PR allows \"other-X\" licenses during metadata validation.\r\n\r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2367\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367","id":893317427,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ1ODUxNTE0","number":2367,"title":"Remove getchildren from hyperpartisan news 
detection","user":{"login":"ghomasHudson","id":13795113,"node_id":"MDQ6VXNlcjEzNzk1MTEz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13795113?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ghomasHudson","html_url":"https:\/\/github.com\/ghomasHudson","followers_url":"https:\/\/api.github.com\/users\/ghomasHudson\/followers","following_url":"https:\/\/api.github.com\/users\/ghomasHudson\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ghomasHudson\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ghomasHudson\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ghomasHudson\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ghomasHudson\/orgs","repos_url":"https:\/\/api.github.com\/users\/ghomasHudson\/repos","events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ghomasHudson\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-17T13:10:37Z","updated_at":"2021-05-17T14:07:13Z","closed_at":"2021-05-17T14:07:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2367","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2367.patch"},"body":"`Element.getchildren()` is now deprecated in the ElementTree library (I think in python 3.9, so it still passes the automated tests which are using 3.6. 
But for those of us on bleeding-edge distros it now fails).\r\n\r\nhttps:\/\/bugs.python.org\/issue29209","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2366\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2366","id":893185266,"node_id":"MDU6SXNzdWU4OTMxODUyNjY=","number":2366,"title":"Json loader fails if user-specified features don't match the json data fields order","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-17T10:26:08Z","updated_at":"2021-06-16T10:47:49Z","closed_at":"2021-06-16T10:47:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"If you do\r\n```python\r\ndataset = load_dataset(\"json\", data_files=data_files, features=features)\r\n```\r\nThen depending on the order of the features in the json data field it fails:\r\n```python\r\n[...]\r\n~\/Desktop\/hf\/datasets\/src\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files)\r\n 94 if self.config.schema:\r\n 95 # Cast allows str <-> int\/float, while parse_option explicit_schema does NOT\r\n---> 96 pa_table = pa_table.cast(self.config.schema)\r\n 97 yield i, pa_table\r\n[...]\r\nValueError: Target schema's field names are not matching the table's field names: ['tokens', 'ner_tags'], ['ner_tags', 'tokens']\r\n```\r\n\r\nThis is because one must first re-order the columns of the table to match the `self.config.schema` before calling cast.\r\n\r\nOne way to fix the `cast` would be to replace it with:\r\n```python\r\n# reorder the arrays if necessary + cast to schema\r\n# we can't simply use .cast here because we may need to change the order of the columns\r\npa_table = pa.Table.from_arrays([pa_table[name] for name in schema.names], schema=schema)\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2365\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2365","id":893179697,"node_id":"MDU6SXNzdWU4OTMxNzk2OTc=","number":2365,"title":"Missing ClassLabel encoding in Json loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/5","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/5\/labels","id":6808903,"node_id":"MDk6TWlsZXN0b25lNjgwODkwMw==","number":5,"title":"1.9","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":12,"state":"closed","created_at":"2021-05-31T16:13:06Z","updated_at":"2021-07-12T14:12:00Z","due_on":"2021-07-08T07:00:00Z","closed_at":"2021-07-09T05:50:07Z"},"comments":0,"created_at":"2021-05-17T10:19:10Z","updated_at":"2021-06-28T15:05:34Z","closed_at":"2021-06-28T15:05:34Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Currently if you want to load a json dataset this way\r\n```python\r\ndataset = load_dataset(\"json\", data_files=data_files, features=features)\r\n```\r\nThen if your features has ClassLabel types and if your json data needs class label encoding (i.e. if the labels in the json files are strings and not integers), then it would fail:\r\n```python\r\n[...]\r\n~\/Desktop\/hf\/datasets\/src\/datasets\/packaged_modules\/json\/json.py in _generate_tables(self, files)\r\n 94 if self.config.schema:\r\n 95 # Cast allows str <-> int\/float, while parse_option explicit_schema does NOT\r\n---> 96 pa_table = pa_table.cast(self.config.schema)\r\n 97 yield i, pa_table\r\n[...]\r\nArrowInvalid: Failed to parse string: 'O' as a scalar of type int64\r\n```\r\n\r\nThis is because it just tries to cast the string data to integers, without applying the mapping str->int first\r\n\r\nThe current workaround is to do instead\r\n```python\r\ndataset = load_dataset(\"json\", data_files=data_files)\r\ndataset = dataset.map(features.encode_example, features=features)\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2364\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364","id":892420500,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ1MTI4MDYx","number":2364,"title":"README updated for SNLI, 
MNLI","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-15T11:37:59Z","updated_at":"2021-05-17T14:14:27Z","closed_at":"2021-05-17T13:34:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2364","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2364.patch"},"body":"Closes #2275. Mentioned about -1 labels in MNLI, SNLI and how they should be removed before training. @lhoestq `check_code_quality` test might fail for MNLI as the license name `other-Open Portion of the American National Corpus` is not a registered tag for 'licenses'","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2363\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2363","id":892391232,"node_id":"MDU6SXNzdWU4OTIzOTEyMzI=","number":2363,"title":"Trying to use metric.compute but get 
OSError","user":{"login":"hyusterr","id":52968111,"node_id":"MDQ6VXNlcjUyOTY4MTEx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52968111?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hyusterr","html_url":"https:\/\/github.com\/hyusterr","followers_url":"https:\/\/api.github.com\/users\/hyusterr\/followers","following_url":"https:\/\/api.github.com\/users\/hyusterr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hyusterr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hyusterr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hyusterr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hyusterr\/orgs","repos_url":"https:\/\/api.github.com\/users\/hyusterr\/repos","events_url":"https:\/\/api.github.com\/users\/hyusterr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hyusterr\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-15T08:39:06Z","updated_at":"2021-05-17T10:33:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I want to use metric.compute from load_metric('accuracy') to get training accuracy, but receive OSError. I am wondering what is the mechanism behind the metric calculation, why would it report an OSError?\r\n\r\n```python\r\n195 for epoch in range(num_train_epochs):\r\n196 model.train()\r\n197 for step, batch in enumerate(train_loader):\r\n198 # print(batch['input_ids'].shape)\r\n199 outputs = model(**batch)\r\n200\r\n201 loss = outputs.loss\r\n202 loss \/= gradient_accumulation_steps\r\n203 accelerator.backward(loss)\r\n204\r\n205 predictions = outputs.logits.argmax(dim=-1)\r\n206 metric.add_batch(\r\n207 predictions=accelerator.gather(predictions),\r\n208 references=accelerator.gather(batch['labels'])\r\n209 )\r\n210 progress_bar.set_postfix({'loss': loss.item(), 'train batch acc.': train_metrics})\r\n211\r\n212 if (step + 1) % 50 == 0 or step == len(train_loader) - 1:\r\n213 train_metrics = metric.compute()\r\n```\r\n\r\nthe error message is as below:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"run_multi.py\", line 273, in \r\n main()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 829, in __call__\r\n return self.main(*args, **kwargs)\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 782, in main\r\n rv = self.invoke(ctx)\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 1066, in invoke\r\n return ctx.invoke(self.callback, **ctx.params)\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/click\/core.py\", line 610, in invoke\r\n return callback(*args, **kwargs)\r\n File \"run_multi.py\", line 213, in main\r\n train_metrics = metric.compute()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/datasets\/metric.py\", line 391, in compute\r\n self._finalize()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/datasets\/metric.py\", line 342, in _finalize\r\n self.writer.finalize()\r\n File \"\/home\/yshuang\/.local\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 370, in finalize\r\n self.stream.close()\r\n File \"pyarrow\/io.pxi\", line 132, in pyarrow.lib.NativeFile.close\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\nOSError: 
error closing file\r\n```\r\n\r\n## Environment info\r\n\r\n- `datasets` version: 1.6.1\r\n- Platform: Linux NAME=\"Ubuntu\" VERSION=\"20.04.1 LTS (Focal Fossa)\"\r\n- Python version: python3.8.5\r\n- PyArrow version: 4.0.0\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2362\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362","id":892100749,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ0ODYzOTQw","number":2362,"title":"Fix web_nlg metadata","user":{"login":"julien-c","id":326577,"node_id":"MDQ6VXNlcjMyNjU3Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/326577?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/julien-c","html_url":"https:\/\/github.com\/julien-c","followers_url":"https:\/\/api.github.com\/users\/julien-c\/followers","following_url":"https:\/\/api.github.com\/users\/julien-c\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/julien-c\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/julien-c\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/julien-c\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/julien-c\/orgs","repos_url":"https:\/\/api.github.com\/users\/julien-c\/repos","events_url":"https:\/\/api.github.com\/users\/julien-c\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/julien-c\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-14T17:15:07Z","updated_at":"2021-05-17T13:44:17Z","closed_at":"2021-05-17T13:42:28Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2362","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2362.patch"},"body":"Our metadata storage system does not support `.` inside keys. 
cc @Pierrci \r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2361\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361","id":891982808,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ0NzYzNTU4","number":2361,"title":"preserve dtype for numpy arrays","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-14T14:45:23Z","updated_at":"2021-07-30T07:28:23Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2361","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2361.patch"},"body":"Fixes #625. This lets the user preserve the dtype of numpy array to pyarrow array which was getting lost due to conversion of numpy array -> list -> pyarrow array. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2360\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2360","id":891965964,"node_id":"MDU6SXNzdWU4OTE5NjU5NjQ=","number":2360,"title":"Automatically detect datasets with compatible task schemas","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-14T14:23:40Z","updated_at":"2021-05-14T14:23:40Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"See description of #2255 for details.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2359\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2359","id":891946017,"node_id":"MDU6SXNzdWU4OTE5NDYwMTc=","number":2359,"title":"Allow model labels to be passed during task 
preparation","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-14T13:58:28Z","updated_at":"2021-05-14T13:58:28Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Models have a config with label2id. And we have the same for datasets with the ClassLabel feature type. At one point either the model or the dataset must sync with the other. It would be great to do that on the dataset side.\r\n\r\nFor example for sentiment classification on amazon reviews with you could have these labels:\r\n- \"1 star\", \"2 stars\", \"3 stars\", \"4 stars\", \"5 stars\"\r\n- \"1\", \"2\", \"3\", \"4\", \"5\"\r\n\r\nSome models may use the first set, while other models use the second set.\r\n\r\nHere in the `TextClassification` class, the user can only specify one set of labels, while many models could actually be compatible but have different sets of labels. Should we allow users to pass a list of compatible labels sets ?\r\n\r\nThen in terms of API, users could use `dataset.prepare_for_task(\"text-classification\", labels=model.labels)` or something like that.\r\n\r\nThe label set could also be the same but not in the same order. For NLI for example, some models use `[\"neutral\", \"entailment\", \"contradiction\"]` and some others use `[\"neutral\", \"contradiction\", \"entailment\"]`, so we should take care of updating the order of the labels in the dataset to match the labels order of the model.\r\n\r\nLet me know what you think ! 
This can be done in a future PR\r\n\r\n_Originally posted by @lhoestq in https:\/\/github.com\/huggingface\/datasets\/pull\/2255#discussion_r632412792_","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2358\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358","id":891269577,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQ0MTYyOTY2","number":2358,"title":"Roman Urdu Stopwords List","user":{"login":"devzohaib","id":58664161,"node_id":"MDQ6VXNlcjU4NjY0MTYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/58664161?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/devzohaib","html_url":"https:\/\/github.com\/devzohaib","followers_url":"https:\/\/api.github.com\/users\/devzohaib\/followers","following_url":"https:\/\/api.github.com\/users\/devzohaib\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/devzohaib\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/devzohaib\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/devzohaib\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/devzohaib\/orgs","repos_url":"https:\/\/api.github.com\/users\/devzohaib\/repos","events_url":"https:\/\/api.github.com\/users\/devzohaib\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/devzohaib\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-13T18:29:27Z","updated_at":"2021-05-19T08:50:43Z","closed_at":"2021-05-17T14:05:10Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2358","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2358.patch"},"body":"A list of most frequently used Roman Urdu words with different spellings and usages.\r\nThis is a very basic effort to collect some basic stopwords for Roman Urdu to help efforts of analyzing text data in roman Urdu which makes up a huge part of daily internet interaction of Roman-Urdu users.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2357\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357","id":890595693,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQzNTk0NDcz","number":2357,"title":"Adding Microsoft CodeXGlue 
Datasets","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-05-13T00:43:01Z","updated_at":"2021-06-08T09:29:57Z","closed_at":"2021-06-08T09:29:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2357","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2357.patch"},"body":"Hi there, this is a new pull request to get the CodeXGlue datasets into the awesome HF datasets lib. Most of the work has been done in this PR #997 by the awesome @madlag. However, that PR has been stale for a while now and so I spoke with @lhoestq about finishing up the final mile and so he told me to open a new PR with the final changes :smile:. \r\n\r\nI believe I've met all of the changes still left in the old PR to do, except for the change to the languages. I believe the READMEs should include the different programming languages used rather than just using the tag \"code\" as when searching for datasets, SE researchers may specifically be looking only for what type of programming language and so being able to quickly filter will be very valuable. 
Let me know what you think of that or if you still believe it should be the \"code\" tag @lhoestq.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2356\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2356","id":890511019,"node_id":"MDU6SXNzdWU4OTA1MTEwMTk=","number":2356,"title":"How to Add New Metrics Guide","user":{"login":"ncoop57","id":7613470,"node_id":"MDQ6VXNlcjc2MTM0NzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7613470?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ncoop57","html_url":"https:\/\/github.com\/ncoop57","followers_url":"https:\/\/api.github.com\/users\/ncoop57\/followers","following_url":"https:\/\/api.github.com\/users\/ncoop57\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ncoop57\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ncoop57\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ncoop57\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ncoop57\/orgs","repos_url":"https:\/\/api.github.com\/users\/ncoop57\/repos","events_url":"https:\/\/api.github.com\/users\/ncoop57\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ncoop57\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-12T21:42:06Z","updated_at":"2021-05-31T18:49:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nCurrently there is an absolutely fantastic guide for how to contribute a new dataset to the library. However, there isn't one for adding new metrics.\r\n\r\n**Describe the solution you'd like**\r\nI'd like for a guide in a similar style to the dataset guide for adding metrics. I believe many of the content in the dataset guide such as setup can be easily copied over with minimal changes. Also, from what I've seen with existing metrics, it shouldn't be as complicated, especially in documentation of the metric, mainly just citation and usage. The most complicated part I see would be in automated tests that run the new metrics, but y'all's test suite seem pretty comprehensive, so it might not be that hard.\r\n\r\n**Describe alternatives you've considered**\r\nOne alternative would be just not having the metrics be community generated and so would not need a step by step guide. New metrics would just be proposed as issues and the internal team would take care of them. 
However, I think it makes more sense to have a step by step guide for contributors to follow.\r\n\r\n**Additional context**\r\nI'd be happy to help with creating this guide as I am very interested in adding software engineering metrics to the library :nerd_face:, the part I would need guidance on would be testing.\r\n\r\nP.S. Love the library and community y'all have built! :hugs: \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2355\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355","id":890484408,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQzNDk5NTIz","number":2355,"title":"normalized TOCs and titles in data cards","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-12T20:59:59Z","updated_at":"2021-05-14T13:23:12Z","closed_at":"2021-05-14T13:23:12Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2355","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2355.patch"},"body":"I started fixing some of the READMEs that were failing the tests introduced by @gchhablani but then realized that there were some consistent differences between earlier and newer versions of some of the titles (e.g. Data Splits vs Data Splits Sample Size, Supported Tasks vs Supported Tasks and Leaderboards). 
We also had different versions of the Table of Content\r\n\r\nThis PR normalizes all of them to the newer version","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2354\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2354","id":890439523,"node_id":"MDU6SXNzdWU4OTA0Mzk1MjM=","number":2354,"title":"Document DatasetInfo attributes","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-05-12T20:01:29Z","updated_at":"2021-05-22T09:26:14Z","closed_at":"2021-05-22T09:26:14Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nAs noted in PR #2255, the attributes of `DatasetInfo` are not documented in the [docs](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html?highlight=datasetinfo#datasetinfo). 
It would be nice to do so :)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2353\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353","id":890296262,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQzMzM4MDcz","number":2353,"title":"Update README vallidation rules","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-12T16:57:26Z","updated_at":"2021-05-14T08:56:06Z","closed_at":"2021-05-14T08:56:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2353","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2353.patch"},"body":"This PR allows unexpected subsections under third-level headings. 
All except `Contributions`.\r\n\r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2352\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352","id":889810100,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQyOTI4NTgz","number":2352,"title":"Set to_json default to JSON lines","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-12T08:19:25Z","updated_at":"2021-05-21T09:01:14Z","closed_at":"2021-05-21T09:01:13Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2352","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2352.patch"},"body":"With this PR, the method `Dataset.to_json`:\r\n- is added to the docs\r\n- defaults to JSON lines","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2351\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351","id":889584953,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQyNzI5NDIz","number":2351,"title":"simpllify faiss index 
save","user":{"login":"Guitaricet","id":2821124,"node_id":"MDQ6VXNlcjI4MjExMjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2821124?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Guitaricet","html_url":"https:\/\/github.com\/Guitaricet","followers_url":"https:\/\/api.github.com\/users\/Guitaricet\/followers","following_url":"https:\/\/api.github.com\/users\/Guitaricet\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Guitaricet\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Guitaricet\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Guitaricet\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Guitaricet\/orgs","repos_url":"https:\/\/api.github.com\/users\/Guitaricet\/repos","events_url":"https:\/\/api.github.com\/users\/Guitaricet\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Guitaricet\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-12T03:54:10Z","updated_at":"2021-05-17T13:41:41Z","closed_at":"2021-05-17T13:41:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2351","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2351.patch"},"body":"Fixes #2350\r\n\r\nIn some cases, Faiss GPU index objects do not have neither \"device\" nor \"getDevice\". Possibly this happens when some part of the index is computed on CPU.\r\n\r\nIn particular, this would happen with the index `OPQ16_128,IVF512,PQ32` (issue #2350). I did check it, but it is likely that `OPQ` or `PQ` transforms cause it.\r\n\r\nI propose, instead of using the index object to get the device, to infer it form the `FaissIndex.device` field as it is done in `.add_vectors`. Here we assume that `.device` always corresponds to the index placement and it seems reasonable. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2350\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2350","id":889580247,"node_id":"MDU6SXNzdWU4ODk1ODAyNDc=","number":2350,"title":"`FaissIndex.save` throws error on GPU","user":{"login":"Guitaricet","id":2821124,"node_id":"MDQ6VXNlcjI4MjExMjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2821124?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Guitaricet","html_url":"https:\/\/github.com\/Guitaricet","followers_url":"https:\/\/api.github.com\/users\/Guitaricet\/followers","following_url":"https:\/\/api.github.com\/users\/Guitaricet\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Guitaricet\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Guitaricet\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Guitaricet\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Guitaricet\/orgs","repos_url":"https:\/\/api.github.com\/users\/Guitaricet\/repos","events_url":"https:\/\/api.github.com\/users\/Guitaricet\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Guitaricet\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-12T03:41:56Z","updated_at":"2021-05-17T13:41:41Z","closed_at":"2021-05-17T13:41:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAfter training an index with a factory string `OPQ16_128,IVF512,PQ32` on GPU, `.save_faiss_index` throws this error.\r\n\r\n```\r\n File \"index_wikipedia.py\", line 119, in \r\n data[\"train\"].save_faiss_index(\"text_emb\", index_save_path)\r\n File \"\/home\/vlialin\/miniconda3\/envs\/cat\/lib\/python3.8\/site-packages\/datasets\/search.py\", line 470, in save_faiss_index\r\n index.save(file)\r\n File \"\/home\/vlialin\/miniconda3\/envs\/cat\/lib\/python3.8\/site-packages\/datasets\/search.py\", line 334, in save\r\n faiss.write_index(index, str(file))\r\n File \"\/home\/vlialin\/miniconda3\/envs\/cat\/lib\/python3.8\/site-packages\/faiss\/swigfaiss_avx2.py\", line 5654, in write_index\r\n return _swigfaiss.write_index(*args)\r\nRuntimeError: Error in void faiss::write_index(const faiss::Index*, faiss::IOWriter*) at \/root\/miniconda3\/conda-bld\/faiss-pkg_1613235005464\/work\/faiss\/impl\/index_write.cpp:453: don't know how to serialize this type of index\r\n```\r\n\r\n## Steps to reproduce the bug\r\n\r\nAny dataset will do, I just selected a familiar one.\r\n\r\n```python\r\nimport numpy as np\r\nimport datasets\r\nINDEX_STR = \"OPQ16_128,IVF512,PQ32\"\r\nINDEX_SAVE_PATH = \"will_not_save.faiss\"\r\n\r\ndata = datasets.load_dataset(\"Fraser\/news-category-dataset\", split=f\"train[:10000]\")\r\n\r\ndef encode(item):\r\n 
return {\"text_emb\": np.random.randn(768).astype(np.float32)}\r\n\r\ndata = data.map(encode)\r\n\r\ndata.add_faiss_index(column=\"text_emb\", string_factory=INDEX_STR, train_size=10_000, device=0)\r\ndata.save_faiss_index(\"text_emb\", INDEX_SAVE_PATH)\r\n```\r\n\r\n## Expected results\r\nSaving the index\r\n\r\n## Actual results\r\nError in void faiss::write_index(const faiss::Index*, faiss::IOWriter*) ... don't know how to serialize this type of index\r\n\r\n## Environment info\r\n- `datasets` version: 1.6.2\r\n- Platform: Linux-4.15.0-142-generic-x86_64-with-glibc2.10\r\n- Python version: 3.8.8\r\n- PyTorch version (GPU?): 1.8.1+cu111 (True)\r\n- Tensorflow version (GPU?): 2.2.0 (False)\r\n- Using GPU in script?: Yes\r\n- Using distributed or parallel set-up in script?: No\r\n\r\n\r\nI will be proposing a fix in a couple of minutes","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2349\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349","id":888586018,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQxNzYzNzg3","number":2349,"title":"Update task_ids for Ascent KB","user":{"login":"phongnt570","id":6749421,"node_id":"MDQ6VXNlcjY3NDk0MjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6749421?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phongnt570","html_url":"https:\/\/github.com\/phongnt570","followers_url":"https:\/\/api.github.com\/users\/phongnt570\/followers","following_url":"https:\/\/api.github.com\/users\/phongnt570\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phongnt570\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phongnt570\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phongnt570\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phongnt570\/orgs","repos_url":"https:\/\/api.github.com\/users\/phongnt570\/repos","events_url":"https:\/\/api.github.com\/users\/phongnt570\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phongnt570\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-11T20:44:33Z","updated_at":"2021-05-17T10:53:14Z","closed_at":"2021-05-17T10:48:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2349","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2349.patch"},"body":"This \"other-other-knowledge-base\" task is better suited for the dataset.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2348\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348","id":887927737,"node_id":"MDExOlB1bGxSZXF1ZXN0NjQxMTMwOTM4","number":2348,"title":"Add tests for dataset cards","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-11T17:14:27Z","updated_at":"2021-05-21T12:10:47Z","closed_at":"2021-05-21T12:10:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2348","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2348.patch"},"body":"Adding tests for dataset cards\r\n\r\nThis PR will potentially remove the scripts being used for dataset tags and readme validation.\r\n\r\nAdditionally, this will allow testing dataset readmes by providing the name as follows:\r\n\r\n```bash\r\npytest tests\/test_dataset_cards.py::test_dataset_tags[fashion_mnist]\r\n```\r\nand\r\n\r\n```bash\r\npytest tests\/test_dataset_cards.py::test_readme_content[fashion_mnist]\r\n```\r\nor a combined test as:\r\n\r\n```bash\r\npytest tests\/test_dataset_cards.py::test_dataset_card[fashion_mnist]\r\n```\r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2347\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2347","id":887404868,"node_id":"MDU6SXNzdWU4ODc0MDQ4Njg=","number":2347,"title":"Add an API to access the language and pretty name of a 
dataset","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-05-11T14:10:08Z","updated_at":"2021-05-21T09:26:46Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"It would be super nice to have an API to get some metadata of the dataset from the name and args passed to `load_dataset`. This way we could programmatically infer the language and the name of a dataset when creating model cards automatically in the Transformers examples scripts.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2346\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346","id":886632114,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM5OTAzMjk3","number":2346,"title":"Add Qasper 
Dataset","user":{"login":"cceyda","id":15624271,"node_id":"MDQ6VXNlcjE1NjI0Mjcx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15624271?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cceyda","html_url":"https:\/\/github.com\/cceyda","followers_url":"https:\/\/api.github.com\/users\/cceyda\/followers","following_url":"https:\/\/api.github.com\/users\/cceyda\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cceyda\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cceyda\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cceyda\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cceyda\/orgs","repos_url":"https:\/\/api.github.com\/users\/cceyda\/repos","events_url":"https:\/\/api.github.com\/users\/cceyda\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cceyda\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-11T09:25:44Z","updated_at":"2021-05-18T12:28:28Z","closed_at":"2021-05-18T12:28:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2346","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2346.patch"},"body":"[Question Answering on Scientific Research Papers](https:\/\/allenai.org\/project\/qasper\/home)\r\n\r\nDoing NLP on NLP papers to do NLP \u267b\ufe0f I had to add it~\r\n\r\n- [x] Add README (just gotta fill out some more )\r\n- [x] Dataloader code\r\n- [x] Make dummy dataset\r\n- [x] generate dataset infos\r\n- [x] Tests\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2345\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2345","id":886586872,"node_id":"MDU6SXNzdWU4ODY1ODY4NzI=","number":2345,"title":"[Question] How to move and reuse preprocessed dataset? 
","user":{"login":"AtmaHou","id":15045402,"node_id":"MDQ6VXNlcjE1MDQ1NDAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15045402?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/AtmaHou","html_url":"https:\/\/github.com\/AtmaHou","followers_url":"https:\/\/api.github.com\/users\/AtmaHou\/followers","following_url":"https:\/\/api.github.com\/users\/AtmaHou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/AtmaHou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/AtmaHou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/AtmaHou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/AtmaHou\/orgs","repos_url":"https:\/\/api.github.com\/users\/AtmaHou\/repos","events_url":"https:\/\/api.github.com\/users\/AtmaHou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/AtmaHou\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-11T09:09:17Z","updated_at":"2021-06-11T04:39:11Z","closed_at":"2021-06-11T04:39:11Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, I am training a gpt-2 from scratch using run_clm.py.\r\n\r\nI want to move and reuse the preprocessed dataset (It take 2 hour to preprocess),\r\n\r\nI tried to :\r\n\r\ncopy path_to_cache_dir\/datasets to new_cache_dir\/datasets\r\nset export HF_DATASETS_CACHE=\"new_cache_dir\/\"\r\nbut the program still re-preprocess the whole dataset without loading cache.\r\n\r\nI also tried to torch.save(lm_datasets, fw), but the saved file is only 14M.\r\n\r\nWhat is the proper way to do this?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2344\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2344","id":885331505,"node_id":"MDU6SXNzdWU4ODUzMzE1MDU=","number":2344,"title":"Is there a way to join multiple datasets in 
one?","user":{"login":"alexvaca0","id":35173563,"node_id":"MDQ6VXNlcjM1MTczNTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35173563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexvaca0","html_url":"https:\/\/github.com\/alexvaca0","followers_url":"https:\/\/api.github.com\/users\/alexvaca0\/followers","following_url":"https:\/\/api.github.com\/users\/alexvaca0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexvaca0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexvaca0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexvaca0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexvaca0\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexvaca0\/repos","events_url":"https:\/\/api.github.com\/users\/alexvaca0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexvaca0\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-10T23:16:10Z","updated_at":"2021-05-11T08:24:48Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\nI need to join 2 datasets, one that is in the hub and another I've created from my files. Is there an easy way to join these 2? \n\n**Describe the solution you'd like**\nId like to join them with a merge or join method, just like pandas dataframes. \n\n**Additional context**\nIf you want to extend an existing dataset with more data, for example for training a language model, you need that functionality. 
I've not found it in the documentation.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2343\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2343","id":883208539,"node_id":"MDU6SXNzdWU4ODMyMDg1Mzk=","number":2343,"title":"Columns are removed before or after map function applied?","user":{"login":"taghizad3h","id":8199406,"node_id":"MDQ6VXNlcjgxOTk0MDY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8199406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/taghizad3h","html_url":"https:\/\/github.com\/taghizad3h","followers_url":"https:\/\/api.github.com\/users\/taghizad3h\/followers","following_url":"https:\/\/api.github.com\/users\/taghizad3h\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/taghizad3h\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/taghizad3h\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/taghizad3h\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/taghizad3h\/orgs","repos_url":"https:\/\/api.github.com\/users\/taghizad3h\/repos","events_url":"https:\/\/api.github.com\/users\/taghizad3h\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/taghizad3h\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-10T02:36:20Z","updated_at":"2021-05-10T02:36:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nAccording to the documentation when applying map function the [remove_columns ](https:\/\/huggingface.co\/docs\/datasets\/processing.html#removing-columns) will be removed after they are passed to the function, but in the [source code](https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html#datasets.Dataset.map) it's documented that they are removed before applying function. 
I thinks the source code doc is more accurate, right?\r\n\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2342\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342","id":882981420,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM2NDg0MzM3","number":2342,"title":"Docs - CER above 1","user":{"login":"borisdayma","id":715491,"node_id":"MDQ6VXNlcjcxNTQ5MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/715491?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/borisdayma","html_url":"https:\/\/github.com\/borisdayma","followers_url":"https:\/\/api.github.com\/users\/borisdayma\/followers","following_url":"https:\/\/api.github.com\/users\/borisdayma\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/borisdayma\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/borisdayma\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/borisdayma\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/borisdayma\/orgs","repos_url":"https:\/\/api.github.com\/users\/borisdayma\/repos","events_url":"https:\/\/api.github.com\/users\/borisdayma\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/borisdayma\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-09T23:41:00Z","updated_at":"2021-05-10T13:34:00Z","closed_at":"2021-05-10T13:34:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2342","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2342.patch"},"body":"CER can actually be greater than 1.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2341\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341","id":882370933,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM1OTExODI2","number":2341,"title":"Added the Ascent 
KB","user":{"login":"phongnt570","id":6749421,"node_id":"MDQ6VXNlcjY3NDk0MjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6749421?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phongnt570","html_url":"https:\/\/github.com\/phongnt570","followers_url":"https:\/\/api.github.com\/users\/phongnt570\/followers","following_url":"https:\/\/api.github.com\/users\/phongnt570\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phongnt570\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phongnt570\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phongnt570\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phongnt570\/orgs","repos_url":"https:\/\/api.github.com\/users\/phongnt570\/repos","events_url":"https:\/\/api.github.com\/users\/phongnt570\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phongnt570\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-09T14:17:39Z","updated_at":"2021-05-11T09:16:59Z","closed_at":"2021-05-11T09:16:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2341","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2341.patch"},"body":"Added the Ascent Commonsense KB of 8.9M assertions.\r\n\r\n- Paper: [Advanced Semantics for Commonsense Knowledge Extraction (WWW'21)](https:\/\/arxiv.org\/abs\/2011.00905)\r\n- Website: https:\/\/ascent.mpi-inf.mpg.de\/\r\n\r\n(I am the author of the dataset)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2340\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340","id":882370824,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM1OTExNzIx","number":2340,"title":"More consistent copy 
logic","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-09T14:17:33Z","updated_at":"2021-05-11T08:58:33Z","closed_at":"2021-05-11T08:58:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2340","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2340.patch"},"body":"Use `info.copy()` instead of `copy.deepcopy(info)`.\r\n`Features.copy` now creates a deep copy.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2338\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338","id":882046077,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM1NjA3NzQx","number":2338,"title":"fixed download link for 
web_science","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-09T09:12:20Z","updated_at":"2021-05-10T13:35:53Z","closed_at":"2021-05-10T13:35:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2338","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2338.patch"},"body":"Fixes #2337. Should work with:\r\n`dataset = load_dataset(\"web_of_science\", \"WOS11967\", ignore_verifications=True)`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2337\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2337","id":881610567,"node_id":"MDU6SXNzdWU4ODE2MTA1Njc=","number":2337,"title":"NonMatchingChecksumError for web_of_science 
dataset","user":{"login":"nbroad1881","id":24982805,"node_id":"MDQ6VXNlcjI0OTgyODA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24982805?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nbroad1881","html_url":"https:\/\/github.com\/nbroad1881","followers_url":"https:\/\/api.github.com\/users\/nbroad1881\/followers","following_url":"https:\/\/api.github.com\/users\/nbroad1881\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nbroad1881\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nbroad1881\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nbroad1881\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nbroad1881\/orgs","repos_url":"https:\/\/api.github.com\/users\/nbroad1881\/repos","events_url":"https:\/\/api.github.com\/users\/nbroad1881\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nbroad1881\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-09T02:02:02Z","updated_at":"2021-05-10T13:35:53Z","closed_at":"2021-05-10T13:35:53Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"NonMatchingChecksumError when trying to download the web_of_science dataset. \r\n\r\n>NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/data.mendeley.com\/datasets\/9rw3vkcfy4\/6\/files\/c9ea673d-5542-44c0-ab7b-f1311f7d61df\/WebOfScience.zip?dl=1']\r\n\r\nSetting `ignore_verfications=True` results in OSError.\r\n\r\n>OSError: Cannot find data file. \r\nOriginal error:\r\n[Errno 20] Not a directory: '\/root\/.cache\/huggingface\/datasets\/downloads\/37ab2c42f50d553c1d0ea432baca3e9e11fedea4aeec63a81e6b7e25dd10d4e7\/WOS5736\/X.txt'\r\n\r\n```python\r\ndataset = load_dataset('web_of_science', 'WOS5736')\r\n```\r\nThere are 3 data instances and they all don't work. 
'WOS5736', 'WOS11967', 'WOS46985'\r\n\r\ndatasets 1.6.2\r\npython 3.7.10\r\nUbuntu 18.04.5 LTS","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2336\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336","id":881298783,"node_id":"MDExOlB1bGxSZXF1ZXN0NjM0ODk1OTU5","number":2336,"title":"Fix overflow issue in interpolation search","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-08T20:51:36Z","updated_at":"2021-05-10T13:29:07Z","closed_at":"2021-05-10T13:26:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2336","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2336.patch"},"body":"Fixes #2335 \r\n\r\nMore info about this error can be found [here](https:\/\/stackoverflow.com\/questions\/53239890\/why-do-i-keep-getting-this-error-runtimewarning-overflow-encountered-in-int-sc\/53240100). 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2335\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2335","id":881291887,"node_id":"MDU6SXNzdWU4ODEyOTE4ODc=","number":2335,"title":"Index error in Dataset.map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-08T20:44:57Z","updated_at":"2021-05-10T13:26:12Z","closed_at":"2021-05-10T13:26:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"The following code, if executed on master, raises an IndexError (due to overflow):\r\n```python\r\n>>> from datasets import *\r\n>>> d = load_dataset(\"bookcorpus\", split=\"train\")\r\nReusing dataset bookcorpus (C:\\Users\\Mario\\.cache\\huggingface\\datasets\\bookcorpus\\plain_text\\1.0.0\\44662c4a114441c35200992bea923b170e6f13f2f0beb7c14e43759cec498700)\r\n2021-05-08 21:23:46.859818: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:48] Successfully opened dynamic library cudart64_101.dll\r\n>>> d.map(lambda ex: ex)\r\n 0%|\u258e | 289430\/74004228 [00:13<58:41, 20935.33ex\/s]c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\table.py:84: RuntimeWarning: overflow encountered in int_scalars\r\n k = i + ((j - i) * (x - arr[i]) \/\/ (arr[j] - arr[i]))\r\n 0%|\u258e | 290162\/74004228 [00:13<59:11, 20757.23ex\/s]\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1498, in map\r\n new_fingerprint=new_fingerprint,\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 174, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\fingerprint.py\", line 340, in wrapper\r\n out = 
func(self, *args, **kwargs)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1799, in _map_single\r\n for i, example in enumerate(pbar):\r\n File \"C:\\Users\\Mario\\Anaconda3\\envs\\hf-datasets\\lib\\site-packages\\tqdm\\std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1145, in __iter__\r\n format_kwargs=format_kwargs,\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\arrow_dataset.py\", line 1337, in _getitem\r\n pa_subtable = query_table(self._data, key, indices=self._indices if self._indices is not None else None)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\formatting\\formatting.py\", line 368, in query_table\r\n pa_subtable = _query_table(table, key)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\formatting\\formatting.py\", line 79, in _query_table\r\n return table.fast_slice(key % table.num_rows, 1)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\table.py\", line 128, in fast_slice\r\n i = _interpolation_search(self._offsets, offset)\r\n File \"c:\\users\\mario\\desktop\\projects\\datasets-1\\src\\datasets\\table.py\", line 91, in _interpolation_search\r\n raise IndexError(f\"Invalid query '{x}' for size {arr[-1] if len(arr) else 'none'}.\")\r\nIndexError: Invalid query '290162' for size 74004228.\r\n```\r\nTested on Windows, can run on Linux if needed.\r\n\r\nEDIT:\r\nIt seems like for this to happen, the default NumPy dtype has to be np.int32.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2334\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334","id":879810107,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMzNTAzNTEw","number":2334,"title":"Updating the DART file checksums in 
GEM","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-07T21:53:44Z","updated_at":"2021-05-07T22:18:10Z","closed_at":"2021-05-07T22:18:10Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2334","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2334.patch"},"body":"The DART files were just updated on the source GitHub\r\n\r\nhttps:\/\/github.com\/Yale-LILY\/dart\/commit\/34b3c872da4811523e334f1631e54ca8105dffab","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2333\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333","id":879214067,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMyOTUwNzIy","number":2333,"title":"Fix duplicate 
keys","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-07T15:28:08Z","updated_at":"2021-05-08T21:47:31Z","closed_at":"2021-05-07T15:57:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2333","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2333.patch"},"body":"As noticed in https:\/\/github.com\/huggingface\/datasets\/pull\/2245, many datasets yield duplicate keys.\r\nMost of the time it was because the counter used for ids were reset at each new data file.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2332\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332","id":879041608,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMyNzk1NDE4","number":2332,"title":"Add note about indices mapping in save_to_disk 
docstring","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-07T13:49:42Z","updated_at":"2021-05-07T17:20:48Z","closed_at":"2021-05-07T17:20:48Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2332","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2332.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2331\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2331","id":879031427,"node_id":"MDU6SXNzdWU4NzkwMzE0Mjc=","number":2331,"title":"Add Topical-Chat","user":{"login":"ktangri","id":22266659,"node_id":"MDQ6VXNlcjIyMjY2NjU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22266659?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ktangri","html_url":"https:\/\/github.com\/ktangri","followers_url":"https:\/\/api.github.com\/users\/ktangri\/followers","following_url":"https:\/\/api.github.com\/users\/ktangri\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ktangri\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ktangri\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ktangri\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ktangri\/orgs","repos_url":"https:\/\/api.github.com\/users\/ktangri\/repos","events_url":"https:\/\/api.github.com\/users\/ktangri\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ktangri\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-07T13:43:59Z","updated_at":"2021-05-07T13:43:59Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Topical-Chat\r\n- **Description:** a knowledge-grounded human-human conversation dataset where the underlying knowledge spans 8 broad topics and conversation partners don\u2019t have explicitly defined roles\r\n- **Paper:** https:\/\/www.isca-speech.org\/archive\/Interspeech_2019\/pdfs\/3079.pdf\r\n- **Data:** https:\/\/github.com\/alexa\/Topical-Chat\r\n- **Motivation:** Good quality, knowledge-grounded dataset that spans a broad range of topics\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2330\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2330","id":878490927,"node_id":"MDU6SXNzdWU4Nzg0OTA5Mjc=","number":2330,"title":"Allow passing `desc` to `tqdm` in `Dataset.map()`","user":{"login":"cccntu","id":31893406,"node_id":"MDQ6VXNlcjMxODkzNDA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31893406?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cccntu","html_url":"https:\/\/github.com\/cccntu","followers_url":"https:\/\/api.github.com\/users\/cccntu\/followers","following_url":"https:\/\/api.github.com\/users\/cccntu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cccntu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cccntu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cccntu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cccntu\/orgs","repos_url":"https:\/\/api.github.com\/users\/cccntu\/repos","events_url":"https:\/\/api.github.com\/users\/cccntu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cccntu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-07T05:52:54Z","updated_at":"2021-05-26T14:59:21Z","closed_at":"2021-05-26T14:59:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"It's normal to have many `map()` calls, and some of them can take a few minutes,\r\nit would be nice to have a description on the progress bar.\r\n\r\nAlternative solution:\r\nPrint the description before\/after the `map()` 
call.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2329\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329","id":877924198,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMxODA3MTk0","number":2329,"title":"Add cache dir for in-memory datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-05-06T19:35:32Z","updated_at":"2021-06-08T19:46:48Z","closed_at":"2021-06-08T19:06:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2329","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2329.patch"},"body":"Adds the cache dir attribute to DatasetInfo as suggested by @lhoestq.\r\n\r\nShould fix #2322 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2328\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328","id":877673896,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMxNTg2MzU2","number":2328,"title":"Add Matthews\/Pearson\/Spearman correlation 
metrics","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-06T16:09:27Z","updated_at":"2021-05-06T16:58:10Z","closed_at":"2021-05-06T16:58:10Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2328","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2328.patch"},"body":"Added three metrics:\r\n- The Matthews correlation coefficient (from sklearn)\r\n- The Pearson correlation coefficient (from scipy)\r\n- The Spearman correlation coefficient (from scipy)\r\n\r\ncc @sgugger ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2327\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2327","id":877565831,"node_id":"MDU6SXNzdWU4Nzc1NjU4MzE=","number":2327,"title":"A syntax error in 
example","user":{"login":"mymusise","id":6883957,"node_id":"MDQ6VXNlcjY4ODM5NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6883957?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mymusise","html_url":"https:\/\/github.com\/mymusise","followers_url":"https:\/\/api.github.com\/users\/mymusise\/followers","following_url":"https:\/\/api.github.com\/users\/mymusise\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mymusise\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mymusise\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mymusise\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mymusise\/orgs","repos_url":"https:\/\/api.github.com\/users\/mymusise\/repos","events_url":"https:\/\/api.github.com\/users\/mymusise\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mymusise\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-06T14:34:44Z","updated_at":"2021-05-20T03:04:19Z","closed_at":"2021-05-20T03:04:19Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/6883957\/117315905-b47a5c00-aeba-11eb-91eb-b2a4a0212a56.png)\r\n\r\nSorry to report with an image, I can't find the template source code of this snippet.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2326\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326","id":876829254,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwODk3MjI4","number":2326,"title":"Enable auto-download for PAN-X \/ Wikiann domain in 
XTREME","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-05T20:58:38Z","updated_at":"2021-05-07T08:41:10Z","closed_at":"2021-05-07T08:41:10Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2326","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2326.patch"},"body":"This PR replaces the manual download of the `PAN-X.lang` domains with an auto-download from a Dropbox link provided by the Wikiann author. We also add the relevant dummy data for these domains.\r\n\r\nWhile re-generating `dataset_infos.json` I ran into a `KeyError` in the `udpos.Arabic` domain so have included a fix for this as well.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2325\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325","id":876653121,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwNzU1MzIx","number":2325,"title":"Added the HLGD 
dataset","user":{"login":"tingofurro","id":2609265,"node_id":"MDQ6VXNlcjI2MDkyNjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2609265?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tingofurro","html_url":"https:\/\/github.com\/tingofurro","followers_url":"https:\/\/api.github.com\/users\/tingofurro\/followers","following_url":"https:\/\/api.github.com\/users\/tingofurro\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tingofurro\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tingofurro\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tingofurro\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tingofurro\/orgs","repos_url":"https:\/\/api.github.com\/users\/tingofurro\/repos","events_url":"https:\/\/api.github.com\/users\/tingofurro\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tingofurro\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-05T16:53:29Z","updated_at":"2021-05-12T14:55:13Z","closed_at":"2021-05-12T14:16:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2325","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2325.patch"},"body":"Added the Headline Grouping Dataset (HLGD), from the NAACL2021 paper: News Headline Grouping as a Challenging NLU Task\r\nDataset Link: https:\/\/github.com\/tingofurro\/headline_grouping\r\nPaper link: https:\/\/people.eecs.berkeley.edu\/~phillab\/pdfs\/NAACL2021_HLG.pdf","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2324\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324","id":876602064,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwNzE1NTQz","number":2324,"title":"Create Audio 
feature","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":5,"closed_issues":2,"state":"open","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-08-04T17:03:52Z","due_on":"2021-08-21T07:00:00Z","closed_at":null},"comments":2,"created_at":"2021-05-05T15:55:22Z","updated_at":"2021-07-21T15:36:06Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2324","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2324.patch"},"body":"Create `Audio` feature to handle raw audio files.\r\n\r\nSome decisions to be further discussed:\r\n- I have chosen `soundfile` as the audio library; another interesting library is `librosa`, but this requires `soundfile` (see [here](https:\/\/github.com\/librosa\/librosa\/blob\/main\/setup.cfg#L53)). 
If we require some more advanced functionalities, we could eventually switch the library.\r\n- I have implemented the audio feature as an extra: `pip install datasets[audio]`. For the moment, the typical datasets user uses only text datasets, and there is no need for them for additional package requirements for audio\/image if they do not need them.\r\n- For tests, I require audio dependencies (so that all audio functionalities are checked with our CI test suite); I exclude Linux platforms, which require an additional library to be installed with the distribution package manager\r\n - I also require `pytest-datadir`, which allow to have (audio) data files for tests\r\n- The audio data contain: array and sample_rate.\r\n- The array is reshaped as 1D array (expected input for `Wav2Vec2`).\r\n\r\nNote that to install `soundfile` on Linux, you need to install `libsndfile` using your distribution\u2019s package manager, for example `sudo apt-get install libsndfile1`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2323\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2323","id":876438507,"node_id":"MDU6SXNzdWU4NzY0Mzg1MDc=","number":2323,"title":"load_dataset(\"timit_asr\") gives back duplicates of just one sample text","user":{"login":"ekeleshian","id":33647474,"node_id":"MDQ6VXNlcjMzNjQ3NDc0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33647474?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ekeleshian","html_url":"https:\/\/github.com\/ekeleshian","followers_url":"https:\/\/api.github.com\/users\/ekeleshian\/followers","following_url":"https:\/\/api.github.com\/users\/ekeleshian\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ekeleshian\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ekeleshian\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ekeleshian\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ekeleshian\/orgs","repos_url":"https:\/\/api.github.com\/users\/ekeleshian\/repos","events_url":"https:\/\/api.github.com\/users\/ekeleshian\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ekeleshian\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-05-05T13:14:48Z","updated_at":"2021-05-07T10:32:30Z","closed_at":"2021-05-07T10:32:30Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen you look up on key [\"train\"] and then ['text'], you get back a list with just one sentence duplicated 4620 times. Namely, the sentence \"Would such an act of refusal be useful?\". Similarly when you look up ['test'] and then ['text'], the list is one sentence repeated \"The bungalow was pleasantly situated near the shore.\" 1680 times. 
\r\n\r\nI tried to work around the issue by downgrading to datasets version 1.3.0, inspired by [this post](https:\/\/www.gitmemory.com\/issue\/huggingface\/datasets\/2052\/798904836) and removing the entire huggingface directory from ~\/.cache, but I still get the same issue. \r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\ntimit = load_dataset(\"timit_asr\")\r\nprint(timit['train']['text'])\r\nprint(timit['test']['text'])\r\n```\r\n\r\n## Expected Result\r\nRows of diverse text, like how it is shown in the [wav2vec2.0 tutorial](https:\/\/colab.research.google.com\/github\/patrickvonplaten\/notebooks\/blob\/master\/Fine_tuning_Wav2Vec2_for_English_ASR.ipynb)\r\n\"Screen\r\n\r\n\r\n## Actual results\r\nRows of repeated text.\r\n\"Screen\r\n\r\n\r\n## Versions\r\n- Datasets: 1.3.0\r\n- Python: 3.9.1\r\n- Platform: macOS-11.2.1-x86_64-i386-64bit}\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2322\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2322","id":876383853,"node_id":"MDU6SXNzdWU4NzYzODM4NTM=","number":2322,"title":"Calls to map are not cached.","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-05-05T12:11:27Z","updated_at":"2021-06-08T19:10:02Z","closed_at":"2021-06-08T19:08:21Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nSomehow caching does not work for me anymore. 
Am I doing something wrong, or is there anything that I missed?\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n\r\nimport datasets\r\ndatasets.set_caching_enabled(True)\r\nsst = datasets.load_dataset(\"sst\")\r\n\r\ndef foo(samples, i):\r\n print(\"executed\", i[:10])\r\n return samples\r\n\r\n# first call\r\nx = sst.map(foo, batched=True, with_indices=True, num_proc=2)\r\n\r\nprint('\\n'*3, \"#\" * 30, '\\n'*3)\r\n\r\n# second call\r\ny = sst.map(foo, batched=True, with_indices=True, num_proc=2)\r\n\r\n# print version\r\nimport sys\r\nimport platform\r\nprint(f\"\"\"\r\n- Datasets: {datasets.__version__}\r\n- Python: {sys.version}\r\n- Platform: {platform.platform()}\r\n\"\"\")\r\n```\r\n\r\n## Actual results\r\nThis code prints the following output for me:\r\n```bash\r\nNo config specified, defaulting to: sst\/default\r\nReusing dataset sst (\/home\/johannes\/.cache\/huggingface\/datasets\/sst\/default\/1.0.0\/b8a7889ef01c5d3ae8c379b84cc4080f8aad3ac2bc538701cbe0ac6416fb76ff)\r\n#0: 0%| | 0\/5 [00:00>> from datasets import load_dataset\r\n>>> dataset = load_dataset(\"oscar\", \"unshuffled_deduplicated_af\")\r\nDownloading: 14.7kB [00:00, 4.91MB\/s]\r\nDownloading: 3.07MB [00:00, 32.6MB\/s]\r\nDownloading and preparing dataset oscar\/unshuffled_deduplicated_af (download: 62.93 MiB, generated: 163.38 MiB, post-processed: Unknown size, total: 226.32 MiB) to C:\\Users\\sgraaf\\.cache\\huggingface\\datasets\\oscar\\unshuffled_deduplicated_af\\1.0.0\\bd4f96df5b4512007ef9fd17bbc1ecde459fa53d2fc0049cf99392ba2efcc464...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 81.0\/81.0 [00:00<00:00, 40.5kB\/s]\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 66.0M\/66.0M [00:18<00:00, 3.50MB\/s]\r\nTraceback (most recent call last):\r\n 
File \"\", line 1, in \r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\load.py\", line 745, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\builder.py\", line 574, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\builder.py\", line 652, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\datasets\\builder.py\", line 979, in _prepare_split\r\n for key, record in utils.tqdm(\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\site-packages\\tqdm\\std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"C:\\Users\\sgraaf\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\oscar\\bd4f96df5b4512007ef9fd17bbc1ecde459fa53d2fc0049cf99392ba2efcc464\\oscar.py\", line 359, in _generate_examples\r\n for line in f:\r\n File \"C:\\Users\\sgraaf\\AppData\\Local\\Programs\\Python\\Python39\\lib\\encodings\\cp1252.py\", line 23, in decode\r\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\r\nUnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 7454: character maps to \r\n```\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\nimport datasets\r\nimport sys\r\nimport platform\r\n\r\nprint(f\"\"\"\r\n- Datasets: {datasets.__version__}\r\n- Python: {sys.version}\r\n- Platform: {platform.platform()}\r\n\"\"\")\r\n```\r\n- Datasets: 1.6.2\r\n- Python: 3.9.4 (tags\/v3.9.4:1f2e308, Apr 6 2021, 13:40:21) [MSC v.1928 64 bit (AMD64)]\r\n- Platform: Windows-10-10.0.19041-SP0","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2318\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2318","id":876212460,"node_id":"MDU6SXNzdWU4NzYyMTI0NjA=","number":2318,"title":"[api request] API to obtain \"dataset_module\" dynamic 
path?","user":{"login":"richardliaw","id":4529381,"node_id":"MDQ6VXNlcjQ1MjkzODE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4529381?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/richardliaw","html_url":"https:\/\/github.com\/richardliaw","followers_url":"https:\/\/api.github.com\/users\/richardliaw\/followers","following_url":"https:\/\/api.github.com\/users\/richardliaw\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/richardliaw\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/richardliaw\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/richardliaw\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/richardliaw\/orgs","repos_url":"https:\/\/api.github.com\/users\/richardliaw\/repos","events_url":"https:\/\/api.github.com\/users\/richardliaw\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/richardliaw\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-05-05T08:40:48Z","updated_at":"2021-05-06T08:45:45Z","closed_at":"2021-05-06T07:57:54Z",
"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\nA clear and concise description of what the problem is.\r\n\r\nThis is an awesome library. \r\n\r\nIt seems like the dynamic module path in this library has broken some of hyperparameter tuning functionality: https:\/\/discuss.huggingface.co\/t\/using-hyperparameter-search-in-trainer\/785\/34\r\n\r\nThis is because Ray will spawn new processes, and each process will load modules by path. However, we need to explicitly inform Ray to load the right modules, or else it will error upon import. \r\n\r\nI'd like an API to obtain the dynamic paths. This will allow us to support this functionality in this awesome library while being future proof.\r\n\r\n**Describe the solution you'd like**\r\nA clear and concise description of what you want to happen.\r\n\r\n`datasets.get_dynamic_paths -> List[str]` will be sufficient for my use case.\r\n\r\nBy offering this API, we will be able to address the following issues (by patching the ray integration sufficiently):\r\n\r\nhttps:\/\/github.com\/huggingface\/blog\/issues\/106\r\nhttps:\/\/github.com\/huggingface\/transformers\/issues\/11565\r\nhttps:\/\/discuss.huggingface.co\/t\/using-hyperparameter-search-in-trainer\/785\/34\r\nhttps:\/\/discuss.huggingface.co\/t\/using-hyperparameter-search-in-trainer\/785\/35\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2317\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317","id":875767318,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwMDQxNzc4","number":2317,"title":"Fix incorrect version specification for the pyarrow 
package","user":{"login":"cemilcengiz","id":32267027,"node_id":"MDQ6VXNlcjMyMjY3MDI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32267027?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cemilcengiz","html_url":"https:\/\/github.com\/cemilcengiz","followers_url":"https:\/\/api.github.com\/users\/cemilcengiz\/followers","following_url":"https:\/\/api.github.com\/users\/cemilcengiz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cemilcengiz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cemilcengiz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cemilcengiz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cemilcengiz\/orgs","repos_url":"https:\/\/api.github.com\/users\/cemilcengiz\/repos","events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-04T19:30:20Z","updated_at":"2021-05-05T10:09:16Z","closed_at":"2021-05-05T09:21:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2317","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2317.patch"},"body":"This PR addresses the bug in the pyarrow version specification, which is detailed in #2316 .\r\nSimply, I put a comma between the version bounds.\r\n\r\nFix #2316.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2316\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2316","id":875756353,"node_id":"MDU6SXNzdWU4NzU3NTYzNTM=","number":2316,"title":"Incorrect version specification for 
pyarrow","user":{"login":"cemilcengiz","id":32267027,"node_id":"MDQ6VXNlcjMyMjY3MDI3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32267027?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cemilcengiz","html_url":"https:\/\/github.com\/cemilcengiz","followers_url":"https:\/\/api.github.com\/users\/cemilcengiz\/followers","following_url":"https:\/\/api.github.com\/users\/cemilcengiz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cemilcengiz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cemilcengiz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cemilcengiz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cemilcengiz\/orgs","repos_url":"https:\/\/api.github.com\/users\/cemilcengiz\/repos","events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cemilcengiz\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T19:15:11Z","updated_at":"2021-05-05T10:10:03Z","closed_at":"2021-05-05T10:10:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nThe pyarrow dependency is incorrectly specified in setup.py file, in [this line](https:\/\/github.com\/huggingface\/datasets\/blob\/3a3e5a4da20bfcd75f8b6a6869b240af8feccc12\/setup.py#L77).\r\nAlso as a snippet:\r\n```python\r\n \"pyarrow>=1.0.0<4.0.0\",\r\n```\r\n\r\n## Steps to reproduce the bug\r\n```bash\r\n pip install \"pyarrow>=1.0.0<4.0.0\"\r\n```\r\n\r\n## Expected results\r\nIt is expected to get a pyarrow version between 1.0.0 (inclusive) and 4.0.0 (exclusive).\r\n\r\n## Actual results\r\npip ignores the specified versions since there is a missing comma between the lower and upper limits. Therefore, pip installs the latest pyarrow version from PYPI, which is 4.0.0.\r\nThis is especially problematic since \"conda env export\" fails due to incorrect version specification. 
Here is the conda error as well:\r\n```bash\r\nconda env export\r\nInvalidVersionSpec: Invalid version '1.0.0<4.0.0': invalid character(s)\r\n```\r\n\r\n\r\n## Fix suggestion\r\nPut a comma between the version limits which means replacing the line in setup.py file with the following:\r\n```python\r\n \"pyarrow>=1.0.0,<4.0.0\",\r\n```\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\n- Datasets: 1.6.2\r\n- Python: 3.7.10 (default, Feb 26 2021, 18:47:35) \r\n[GCC 7.3.0]\r\n- Platform: Linux-5.4.0-42-generic-x86_64-with-debian-buster-sid\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2315\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315","id":875742200,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwMDIyMDYy","number":2315,"title":"Datasets cli improvements","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T18:55:11Z","updated_at":"2021-05-10T16:36:51Z","closed_at":"2021-05-10T16:36:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2315","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2315.patch"},"body":"This PR:\r\n* replaces the code from the `bug_report.md` that was used to get relevant system info with a dedicated command (a more elegant approach than copy-pasting the code IMO)\r\n* removes the `download` command (copied from the transformers repo?)\r\n* adds missing help messages to the cli commands\r\n\r\n\r\n\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2314\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314","id":875729271,"node_id":"MDExOlB1bGxSZXF1ZXN0NjMwMDExODc4","number":2314,"title":"Minor refactor prepare_module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T18:37:26Z","updated_at":"2021-07-16T06:59:59Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2314","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2314.patch"},"body":"Start to refactor `prepare_module` to try to decouple functionality.\r\n\r\nThis PR does:\r\n- extract function `_initialize_dynamic_modules_namespace_package`\r\n- extract function `_find_module_in_github_or_s3`\r\n- some renaming of variables\r\n- use of f-strings","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2313\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313","id":875475367,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5ODEwNTc4","number":2313,"title":"Remove unused head_hf_s3 
function","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-04T13:42:06Z","updated_at":"2021-05-07T09:31:42Z","closed_at":"2021-05-07T09:31:42Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2313","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2313.patch"},"body":"Currently, the function `head_hf_s3` is not used:\r\n- neither its returned result is used\r\n- nor it raises any exception, as exceptions are catched and returned (not raised)\r\n\r\nThis PR removes it.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2312\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312","id":875435726,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5Nzc4NjUz","number":2312,"title":"Add rename_columnS 
method","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T12:57:53Z","updated_at":"2021-05-04T13:43:13Z","closed_at":"2021-05-04T13:43:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2312","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2312.patch"},"body":"Cherry-picked from #2255 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2311\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311","id":875262208,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5NjQwNTMx","number":2311,"title":"Add SLR52, SLR53 and SLR54 to 
OpenSLR","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-04T09:08:03Z","updated_at":"2021-05-07T09:50:55Z","closed_at":"2021-05-07T09:50:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2311","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2311.patch"},"body":"Add large speech datasets for Sinhala, Bengali and Nepali.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2310\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310","id":875096051,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5NTEwNTg5","number":2310,"title":"Update 
README.md","user":{"login":"cryoff","id":15029054,"node_id":"MDQ6VXNlcjE1MDI5MDU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15029054?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cryoff","html_url":"https:\/\/github.com\/cryoff","followers_url":"https:\/\/api.github.com\/users\/cryoff\/followers","following_url":"https:\/\/api.github.com\/users\/cryoff\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cryoff\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cryoff\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cryoff\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cryoff\/orgs","repos_url":"https:\/\/api.github.com\/users\/cryoff\/repos","events_url":"https:\/\/api.github.com\/users\/cryoff\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cryoff\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-05-04T04:38:01Z","updated_at":"2021-05-04T06:35:59Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2310","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2310.patch"},"body":"Provides description of data instances and dataset features","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2309\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309","id":874644990,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI5MTU4NjQx","number":2309,"title":"Fix conda 
release","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-03T14:52:59Z","updated_at":"2021-05-03T16:01:17Z","closed_at":"2021-05-03T16:01:17Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2309","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2309.patch"},"body":"There were a few issues with conda releases (they've been failing for a while now).\r\nTo fix this I had to:\r\n- add the --single-version-externally-managed tag to the build stage (suggestion from [here](https:\/\/stackoverflow.com\/a\/64825075))\r\n- set the python version of the conda build stage to 3.8 since 3.9 isn't supported\r\n- sync the evrsion requirement of `huggingface_hub`\r\n\r\nWith these changes I'm working on uploading all missing versions until 1.6.2 to conda\r\n\r\nEDIT: I managed to build and upload all missing versions until 1.6.2 to conda :)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2308\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2308","id":874559846,"node_id":"MDU6SXNzdWU4NzQ1NTk4NDY=","number":2308,"title":"Add COCO evaluation 
metrics","user":{"login":"NielsRogge","id":48327001,"node_id":"MDQ6VXNlcjQ4MzI3MDAx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48327001?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NielsRogge","html_url":"https:\/\/github.com\/NielsRogge","followers_url":"https:\/\/api.github.com\/users\/NielsRogge\/followers","following_url":"https:\/\/api.github.com\/users\/NielsRogge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NielsRogge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NielsRogge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NielsRogge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NielsRogge\/orgs","repos_url":"https:\/\/api.github.com\/users\/NielsRogge\/repos","events_url":"https:\/\/api.github.com\/users\/NielsRogge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NielsRogge\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-03T13:08:05Z","updated_at":"2021-06-04T07:11:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I'm currently working on adding Facebook AI's DETR model (end-to-end object detection with Transformers) to HuggingFace Transformers. The model is working fine, but regarding evaluation, I'm currently relying on external `CocoEvaluator` and `PanopticEvaluator` objects which are defined in the original repository ([here](https:\/\/github.com\/facebookresearch\/detr\/blob\/a54b77800eb8e64e3ad0d8237789fcbf2f8350c5\/datasets\/coco_eval.py#L22) and [here](https:\/\/github.com\/facebookresearch\/detr\/blob\/a54b77800eb8e64e3ad0d8237789fcbf2f8350c5\/datasets\/panoptic_eval.py#L13) respectively). \r\n\r\nRunning these in a notebook gives you nice summaries like this:\r\n![image](https:\/\/user-images.githubusercontent.com\/48327001\/116878842-326f0680-ac20-11eb-9061-d6da02193694.png)\r\n\r\nIt would be great if we could import these metrics from the Datasets library, something like this:\r\n\r\n```\r\nimport datasets\r\n\r\nmetric = datasets.load_metric('coco')\r\n\r\nfor model_input, gold_references in evaluation_dataset:\r\n model_predictions = model(model_inputs)\r\n metric.add_batch(predictions=model_predictions, references=gold_references)\r\n\r\nfinal_score = metric.compute()\r\n```\r\n\r\nI think this would be great for object detection and semantic\/panoptic segmentation in general, not just for DETR. Reproducing results of object detection papers would be way easier.\r\n\r\nHowever, object detection and panoptic segmentation evaluation is a bit more complex than accuracy (it's more like a summary of metrics at different thresholds rather than a single one). 
I'm not sure how to proceed here, but happy to help making this possible.\r\n\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2302\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302","id":873961435,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI4NjIzMDQ3","number":2302,"title":"Add SubjQA dataset","user":{"login":"lewtun","id":26859204,"node_id":"MDQ6VXNlcjI2ODU5MjA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26859204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lewtun","html_url":"https:\/\/github.com\/lewtun","followers_url":"https:\/\/api.github.com\/users\/lewtun\/followers","following_url":"https:\/\/api.github.com\/users\/lewtun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lewtun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lewtun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lewtun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lewtun\/orgs","repos_url":"https:\/\/api.github.com\/users\/lewtun\/repos","events_url":"https:\/\/api.github.com\/users\/lewtun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lewtun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-05-02T14:51:20Z","updated_at":"2021-05-10T09:21:19Z","closed_at":"2021-05-10T09:21:19Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2302","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2302.patch"},"body":"Hello datasetters \ud83d\ude42!\r\n\r\nHere's an interesting dataset about extractive question-answering on _subjective_ product \/ restaurant reviews. It's quite challenging for models fine-tuned on SQuAD and provides a nice example of domain adaptation (i.e. fine-tuning a SQuAD model on this domain gives better performance).\r\n\r\nI found a bug in the start\/end indices that I've proposed a fix for here: https:\/\/github.com\/megagonlabs\/SubjQA\/pull\/2\r\n\r\nUnfortunately, the dataset creators are unresponsive, so for now I am using my fork as the source. 
Will update the URL if\/when the creators respond.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2301\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2301","id":873941266,"node_id":"MDU6SXNzdWU4NzM5NDEyNjY=","number":2301,"title":"Unable to setup dev env on Windows","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-02T13:20:42Z","updated_at":"2021-05-03T15:18:01Z","closed_at":"2021-05-03T15:17:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi\r\n\r\nI tried installing the `\".[dev]\"` version on Windows 10 after cloning.\r\n\r\nHere is the error I'm facing:\r\n\r\n```bat\r\n(env) C:\\testing\\datasets>pip install -e \".[dev]\"\r\nObtaining file:\/\/\/C:\/testing\/datasets\r\nRequirement already satisfied: numpy>=1.17 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.19.5)\r\nCollecting pyarrow>=0.17.1\r\n Using cached pyarrow-4.0.0-cp37-cp37m-win_amd64.whl (13.3 MB)\r\nRequirement already satisfied: dill in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.3.1.1)\r\nCollecting pandas\r\n Using cached pandas-1.2.4-cp37-cp37m-win_amd64.whl (9.1 MB)\r\nRequirement already satisfied: requests>=2.19.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (2.25.1)\r\nRequirement already satisfied: tqdm<4.50.0,>=4.27 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (4.49.0)\r\nRequirement already satisfied: xxhash in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (2.0.2)\r\nCollecting multiprocess\r\n Using cached multiprocess-0.70.11.1-py37-none-any.whl (108 kB)\r\nRequirement already satisfied: fsspec in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (2021.4.0)\r\nCollecting huggingface_hub<0.1.0\r\n Using cached huggingface_hub-0.0.8-py3-none-any.whl (34 kB)\r\nRequirement already satisfied: importlib_metadata in 
c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (4.0.1)\r\nRequirement already satisfied: absl-py in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.12.0)\r\nRequirement already satisfied: pytest in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (6.2.3)\r\nCollecting pytest-xdist\r\n Using cached pytest_xdist-2.2.1-py3-none-any.whl (37 kB)\r\nCollecting apache-beam>=2.24.0\r\n Using cached apache_beam-2.29.0-cp37-cp37m-win_amd64.whl (3.7 MB)\r\nCollecting elasticsearch\r\n Using cached elasticsearch-7.12.1-py2.py3-none-any.whl (339 kB)\r\nRequirement already satisfied: boto3==1.16.43 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.16.43)\r\nRequirement already satisfied: botocore==1.19.43 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.19.43)\r\nCollecting moto[s3]==1.3.16\r\n Using cached moto-1.3.16-py2.py3-none-any.whl (879 kB)\r\nCollecting rarfile>=4.0\r\n Using cached rarfile-4.0-py3-none-any.whl (28 kB)\r\nCollecting tensorflow>=2.3\r\n Using cached tensorflow-2.4.1-cp37-cp37m-win_amd64.whl (370.7 MB)\r\nRequirement already satisfied: torch in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.8.1)\r\nRequirement already satisfied: transformers in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (4.5.1)\r\nCollecting bs4\r\n Using cached bs4-0.0.1-py3-none-any.whl\r\nCollecting conllu\r\n Using cached conllu-4.4-py2.py3-none-any.whl (15 kB)\r\nCollecting langdetect\r\n Using cached langdetect-1.0.8-py3-none-any.whl\r\nCollecting lxml\r\n Using cached lxml-4.6.3-cp37-cp37m-win_amd64.whl (3.5 MB)\r\nCollecting mwparserfromhell\r\n Using cached mwparserfromhell-0.6-cp37-cp37m-win_amd64.whl (101 kB)\r\nCollecting nltk\r\n Using cached nltk-3.6.2-py3-none-any.whl (1.5 MB)\r\nCollecting openpyxl\r\n Using cached openpyxl-3.0.7-py2.py3-none-any.whl (243 kB)\r\nCollecting py7zr\r\n Using cached py7zr-0.15.2-py3-none-any.whl (66 kB)\r\nCollecting tldextract\r\n Using cached tldextract-3.1.0-py2.py3-none-any.whl (87 kB)\r\nCollecting zstandard\r\n Using cached zstandard-0.15.2-cp37-cp37m-win_amd64.whl (582 kB)\r\nCollecting bert_score>=0.3.6\r\n Using cached bert_score-0.3.9-py3-none-any.whl (59 kB)\r\nCollecting rouge_score\r\n Using cached rouge_score-0.0.4-py2.py3-none-any.whl (22 kB)\r\nCollecting sacrebleu\r\n Using cached sacrebleu-1.5.1-py3-none-any.whl (54 kB)\r\nRequirement already satisfied: scipy in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.6.3)\r\nCollecting seqeval\r\n Using cached seqeval-1.2.2-py3-none-any.whl\r\nCollecting sklearn\r\n Using cached sklearn-0.0-py2.py3-none-any.whl\r\nCollecting jiwer\r\n Using cached jiwer-2.2.0-py3-none-any.whl (13 kB)\r\nRequirement already satisfied: toml>=0.10.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.10.2)\r\nRequirement already satisfied: requests_file>=1.5.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.5.1)\r\nRequirement already satisfied: texttable>=1.6.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.6.3)\r\nRequirement already satisfied: s3fs>=0.4.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (0.4.2)\r\nRequirement already 
satisfied: Werkzeug>=1.0.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from datasets==1.5.0.dev0) (1.0.1)\r\nCollecting black\r\n Using cached black-21.4b2-py3-none-any.whl (130 kB)\r\nCollecting isort\r\n Using cached isort-5.8.0-py3-none-any.whl (103 kB)\r\nCollecting flake8==3.7.9\r\n Using cached flake8-3.7.9-py2.py3-none-any.whl (69 kB)\r\nRequirement already satisfied: jmespath<1.0.0,>=0.7.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from boto3==1.16.43->datasets==1.5.0.dev0) (0.10.0)\r\nRequirement already satisfied: s3transfer<0.4.0,>=0.3.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from boto3==1.16.43->datasets==1.5.0.dev0) (0.3.7)\r\nRequirement already satisfied: urllib3<1.27,>=1.25.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from botocore==1.19.43->datasets==1.5.0.dev0) (1.26.4)\r\nRequirement already satisfied: python-dateutil<3.0.0,>=2.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from botocore==1.19.43->datasets==1.5.0.dev0) (2.8.1)\r\nCollecting entrypoints<0.4.0,>=0.3.0\r\n Using cached entrypoints-0.3-py2.py3-none-any.whl (11 kB)\r\nCollecting pyflakes<2.2.0,>=2.1.0\r\n Using cached pyflakes-2.1.1-py2.py3-none-any.whl (59 kB)\r\nCollecting pycodestyle<2.6.0,>=2.5.0\r\n Using cached pycodestyle-2.5.0-py2.py3-none-any.whl (51 kB)\r\nCollecting mccabe<0.7.0,>=0.6.0\r\n Using cached mccabe-0.6.1-py2.py3-none-any.whl (8.6 kB)\r\nRequirement already satisfied: jsondiff>=1.1.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.3.0)\r\nRequirement already satisfied: pytz in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2021.1)\r\nRequirement already satisfied: mock in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (4.0.3)\r\nRequirement already satisfied: MarkupSafe<2.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.1.1)\r\nRequirement already satisfied: python-jose[cryptography]<4.0.0,>=3.1.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.2.0)\r\nRequirement already satisfied: aws-xray-sdk!=0.96,>=0.93 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.8.0)\r\nRequirement already satisfied: cryptography>=2.3.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.4.7)\r\nRequirement already satisfied: more-itertools in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (8.7.0)\r\nRequirement already satisfied: PyYAML>=5.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (5.4.1)\r\nRequirement already satisfied: boto>=2.36.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.49.0)\r\nRequirement already satisfied: idna<3,>=2.5 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.10)\r\nRequirement already satisfied: sshpubkeys>=3.1.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.3.1)\r\nRequirement already satisfied: responses>=0.9.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages 
(from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.13.3)\r\nRequirement already satisfied: xmltodict in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.12.0)\r\nRequirement already satisfied: setuptools in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (52.0.0.post20210125)\r\nRequirement already satisfied: Jinja2>=2.10.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.11.3)\r\nRequirement already satisfied: zipp in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.4.1)\r\nRequirement already satisfied: six>1.9 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.15.0)\r\nRequirement already satisfied: ecdsa<0.15 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.14.1)\r\nRequirement already satisfied: docker>=2.5.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (5.0.0)\r\nRequirement already satisfied: cfn-lint>=0.4.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.49.0)\r\nRequirement already satisfied: grpcio<2,>=1.29.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (1.32.0)\r\nCollecting hdfs<3.0.0,>=2.1.0\r\n Using cached hdfs-2.6.0-py3-none-any.whl (33 kB)\r\nCollecting pyarrow>=0.17.1\r\n Using cached pyarrow-3.0.0-cp37-cp37m-win_amd64.whl (12.6 MB)\r\nCollecting fastavro<2,>=0.21.4\r\n Using cached fastavro-1.4.0-cp37-cp37m-win_amd64.whl (394 kB)\r\nRequirement already satisfied: httplib2<0.18.0,>=0.8 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.17.4)\r\nCollecting pymongo<4.0.0,>=3.8.0\r\n Using cached pymongo-3.11.3-cp37-cp37m-win_amd64.whl (382 kB)\r\nCollecting crcmod<2.0,>=1.7\r\n Using cached crcmod-1.7-py3-none-any.whl\r\nCollecting avro-python3!=1.9.2,<1.10.0,>=1.8.1\r\n Using cached avro_python3-1.9.2.1-py3-none-any.whl\r\nRequirement already satisfied: typing-extensions<3.8.0,>=3.7.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (3.7.4.3)\r\nRequirement already satisfied: future<1.0.0,>=0.18.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.18.2)\r\nCollecting oauth2client<5,>=2.0.1\r\n Using cached oauth2client-4.1.3-py2.py3-none-any.whl (98 kB)\r\nCollecting pydot<2,>=1.2.0\r\n Using cached pydot-1.4.2-py2.py3-none-any.whl (21 kB)\r\nRequirement already satisfied: protobuf<4,>=3.12.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from apache-beam>=2.24.0->datasets==1.5.0.dev0) (3.15.8)\r\nRequirement already satisfied: wrapt in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from aws-xray-sdk!=0.96,>=0.93->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.12.1)\r\nCollecting matplotlib\r\n Using cached matplotlib-3.4.1-cp37-cp37m-win_amd64.whl (7.1 MB)\r\nRequirement already satisfied: junit-xml~=1.9 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.9)\r\nRequirement already satisfied: jsonpatch in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages 
(from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.32)\r\nRequirement already satisfied: jsonschema~=3.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (3.2.0)\r\nRequirement already satisfied: networkx~=2.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.5.1)\r\nRequirement already satisfied: aws-sam-translator>=1.35.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.35.0)\r\nRequirement already satisfied: cffi>=1.12 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cryptography>=2.3.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (1.14.5)\r\nRequirement already satisfied: pycparser in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from cffi>=1.12->cryptography>=2.3.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.20)\r\nRequirement already satisfied: pywin32==227 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from docker>=2.5.1->moto[s3]==1.3.16->datasets==1.5.0.dev0) (227)\r\nRequirement already satisfied: websocket-client>=0.32.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from docker>=2.5.1->moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.58.0)\r\nRequirement already satisfied: docopt in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from hdfs<3.0.0,>=2.1.0->apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.6.2)\r\nRequirement already satisfied: filelock in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from huggingface_hub<0.1.0->datasets==1.5.0.dev0) (3.0.12)\r\nRequirement already satisfied: pyrsistent>=0.14.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from jsonschema~=3.0->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (0.17.3)\r\nRequirement already satisfied: attrs>=17.4.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from jsonschema~=3.0->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (20.3.0)\r\nRequirement already satisfied: decorator<5,>=4.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from networkx~=2.4->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (4.4.2)\r\nRequirement already satisfied: rsa>=3.1.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from oauth2client<5,>=2.0.1->apache-beam>=2.24.0->datasets==1.5.0.dev0) (4.7.2)\r\nRequirement already satisfied: pyasn1-modules>=0.0.5 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from oauth2client<5,>=2.0.1->apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.2.8)\r\nRequirement already satisfied: pyasn1>=0.1.7 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from oauth2client<5,>=2.0.1->apache-beam>=2.24.0->datasets==1.5.0.dev0) (0.4.8)\r\nRequirement already satisfied: pyparsing>=2.1.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pydot<2,>=1.2.0->apache-beam>=2.24.0->datasets==1.5.0.dev0) (2.4.7)\r\nRequirement already satisfied: certifi>=2017.4.17 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from requests>=2.19.0->datasets==1.5.0.dev0) (2020.12.5)\r\nRequirement already satisfied: chardet<5,>=3.0.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from requests>=2.19.0->datasets==1.5.0.dev0) (4.0.0)\r\nCollecting keras-preprocessing~=1.1.2\r\n Using cached Keras_Preprocessing-1.1.2-py2.py3-none-any.whl (42 kB)\r\nRequirement already satisfied: 
termcolor~=1.1.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (1.1.0)\r\nRequirement already satisfied: tensorboard~=2.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (2.5.0)\r\nRequirement already satisfied: wheel~=0.35 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (0.36.2)\r\nCollecting opt-einsum~=3.3.0\r\n Using cached opt_einsum-3.3.0-py3-none-any.whl (65 kB)\r\nCollecting gast==0.3.3\r\n Using cached gast-0.3.3-py2.py3-none-any.whl (9.7 kB)\r\nCollecting google-pasta~=0.2\r\n Using cached google_pasta-0.2.0-py3-none-any.whl (57 kB)\r\nRequirement already satisfied: tensorflow-estimator<2.5.0,>=2.4.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorflow>=2.3->datasets==1.5.0.dev0) (2.4.0)\r\nCollecting astunparse~=1.6.3\r\n Using cached astunparse-1.6.3-py2.py3-none-any.whl (12 kB)\r\nCollecting flatbuffers~=1.12.0\r\n Using cached flatbuffers-1.12-py2.py3-none-any.whl (15 kB)\r\nCollecting h5py~=2.10.0\r\n Using cached h5py-2.10.0-cp37-cp37m-win_amd64.whl (2.5 MB)\r\nRequirement already satisfied: markdown>=2.6.8 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (3.3.4)\r\nRequirement already satisfied: tensorboard-plugin-wit>=1.6.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (1.8.0)\r\nRequirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (0.4.4)\r\nRequirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (0.6.0)\r\nRequirement already satisfied: google-auth<2,>=1.6.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (1.30.0)\r\nRequirement already satisfied: cachetools<5.0,>=2.0.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from google-auth<2,>=1.6.3->tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (4.2.2)\r\nRequirement already satisfied: requests-oauthlib>=0.7.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (1.3.0)\r\nRequirement already satisfied: oauthlib>=3.0.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard~=2.4->tensorflow>=2.3->datasets==1.5.0.dev0) (3.1.0)\r\nRequirement already satisfied: regex!=2019.12.17 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) (2021.4.4)\r\nRequirement already satisfied: tokenizers<0.11,>=0.10.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) (0.10.2)\r\nRequirement already satisfied: sacremoses in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) (0.0.45)\r\nRequirement already satisfied: packaging in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from transformers->datasets==1.5.0.dev0) (20.9)\r\nCollecting pathspec<1,>=0.8.1\r\n Using cached 
pathspec-0.8.1-py2.py3-none-any.whl (28 kB)\r\nRequirement already satisfied: click>=7.1.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from black->datasets==1.5.0.dev0) (7.1.2)\r\nCollecting appdirs\r\n Using cached appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)\r\nCollecting mypy-extensions>=0.4.3\r\n Using cached mypy_extensions-0.4.3-py2.py3-none-any.whl (4.5 kB)\r\nRequirement already satisfied: typed-ast>=1.4.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from black->datasets==1.5.0.dev0) (1.4.3)\r\nCollecting beautifulsoup4\r\n Using cached beautifulsoup4-4.9.3-py3-none-any.whl (115 kB)\r\nRequirement already satisfied: soupsieve>1.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from beautifulsoup4->bs4->datasets==1.5.0.dev0) (2.2.1)\r\nCollecting python-Levenshtein\r\n Using cached python-Levenshtein-0.12.2.tar.gz (50 kB)\r\nRequirement already satisfied: jsonpointer>=1.9 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from jsonpatch->cfn-lint>=0.4.0->moto[s3]==1.3.16->datasets==1.5.0.dev0) (2.1)\r\nRequirement already satisfied: pillow>=6.2.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from matplotlib->bert_score>=0.3.6->datasets==1.5.0.dev0) (8.2.0)\r\nRequirement already satisfied: cycler>=0.10 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from matplotlib->bert_score>=0.3.6->datasets==1.5.0.dev0) (0.10.0)\r\nRequirement already satisfied: kiwisolver>=1.0.1 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from matplotlib->bert_score>=0.3.6->datasets==1.5.0.dev0) (1.3.1)\r\nCollecting multiprocess\r\n Using cached multiprocess-0.70.11-py3-none-any.whl (98 kB)\r\n Using cached multiprocess-0.70.10.zip (2.4 MB)\r\n Using cached multiprocess-0.70.9-py3-none-any.whl\r\nRequirement already satisfied: joblib in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from nltk->datasets==1.5.0.dev0) (1.0.1)\r\nCollecting et-xmlfile\r\n Using cached et_xmlfile-1.1.0-py3-none-any.whl (4.7 kB)\r\nRequirement already satisfied: pyzstd<0.15.0,>=0.14.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from py7zr->datasets==1.5.0.dev0) (0.14.4)\r\nCollecting pyppmd<0.13.0,>=0.12.1\r\n Using cached pyppmd-0.12.1-cp37-cp37m-win_amd64.whl (32 kB)\r\nCollecting pycryptodome>=3.6.6\r\n Using cached pycryptodome-3.10.1-cp35-abi3-win_amd64.whl (1.6 MB)\r\nCollecting bcj-cffi<0.6.0,>=0.5.1\r\n Using cached bcj_cffi-0.5.1-cp37-cp37m-win_amd64.whl (21 kB)\r\nCollecting multivolumefile<0.3.0,>=0.2.0\r\n Using cached multivolumefile-0.2.3-py3-none-any.whl (17 kB)\r\nRequirement already satisfied: iniconfig in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (1.1.1)\r\nRequirement already satisfied: py>=1.8.2 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (1.10.0)\r\nRequirement already satisfied: pluggy<1.0.0a1,>=0.12 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (0.13.1)\r\nRequirement already satisfied: atomicwrites>=1.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (1.4.0)\r\nRequirement already satisfied: colorama in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from pytest->datasets==1.5.0.dev0) (0.4.4)\r\nCollecting pytest-forked\r\n Using cached pytest_forked-1.3.0-py2.py3-none-any.whl (4.7 kB)\r\nCollecting execnet>=1.1\r\n Using cached execnet-1.8.0-py2.py3-none-any.whl 
(39 kB)\r\nRequirement already satisfied: apipkg>=1.4 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from execnet>=1.1->pytest-xdist->datasets==1.5.0.dev0) (1.5)\r\nCollecting portalocker==2.0.0\r\n Using cached portalocker-2.0.0-py2.py3-none-any.whl (11 kB)\r\nRequirement already satisfied: scikit-learn>=0.21.3 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from seqeval->datasets==1.5.0.dev0) (0.24.2)\r\nRequirement already satisfied: threadpoolctl>=2.0.0 in c:\\programdata\\anaconda3\\envs\\env\\lib\\site-packages (from scikit-learn>=0.21.3->seqeval->datasets==1.5.0.dev0) (2.1.0)\r\nBuilding wheels for collected packages: python-Levenshtein\r\n Building wheel for python-Levenshtein (setup.py) ... error\r\n ERROR: Command errored out with exit status 1:\r\n command: 'C:\\ProgramData\\Anaconda3\\envs\\env\\python.exe' -u -c 'import sys, setuptools, tokenize; sys.argv[0] = '\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"'; __file__='\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"';f=getattr(tokenize, '\"'\"'open'\"'\"', open)(__file__);code=f.read().replace('\"'\"'\\r\\n'\"'\"', '\"'\"'\\n'\"'\"');f.close();exec(compile(code, __file__, '\"'\"'exec'\"'\"'))' bdist_wheel -d 'C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-wheel-8jh7fm18'\r\n cwd: C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-install-ynt_dbm4\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\r\n Complete output (27 lines):\r\n running bdist_wheel\r\n running build\r\n running build_py\r\n creating build\r\n creating build\\lib.win-amd64-3.7\r\n creating build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\StringMatcher.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\__init__.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n running egg_info\r\n writing python_Levenshtein.egg-info\\PKG-INFO\r\n writing dependency_links to python_Levenshtein.egg-info\\dependency_links.txt\r\n writing entry points to python_Levenshtein.egg-info\\entry_points.txt\r\n writing namespace_packages to python_Levenshtein.egg-info\\namespace_packages.txt\r\n writing requirements to python_Levenshtein.egg-info\\requires.txt\r\n writing top-level names to python_Levenshtein.egg-info\\top_level.txt\r\n reading manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n reading manifest template 'MANIFEST.in'\r\n warning: no previously-included files matching '*pyc' found anywhere in distribution\r\n warning: no previously-included files matching '*so' found anywhere in distribution\r\n warning: no previously-included files matching '.project' found anywhere in distribution\r\n warning: no previously-included files matching '.pydevproject' found anywhere in distribution\r\n writing manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n copying Levenshtein\\_levenshtein.c -> build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\_levenshtein.h -> build\\lib.win-amd64-3.7\\Levenshtein\r\n running build_ext\r\n building 'Levenshtein._levenshtein' extension\r\n error: Microsoft Visual C++ 14.0 or greater is required. 
Get it with \"Microsoft C++ Build Tools\": https:\/\/visualstudio.microsoft.com\/visual-cpp-build-tools\/\r\n ----------------------------------------\r\n ERROR: Failed building wheel for python-Levenshtein\r\n Running setup.py clean for python-Levenshtein\r\nFailed to build python-Levenshtein\r\nInstalling collected packages: python-Levenshtein, pytest-forked, pyppmd, pymongo, pyflakes, pydot, pycryptodome, pycodestyle, pyarrow, portalocker, pathspec, pandas, opt-einsum, oauth2client, nltk, mypy-extensions, multivolumefile, multiprocess, moto, mccabe, matplotlib, keras-preprocessing, huggingface-hub, hdfs, h5py, google-pasta, gast, flatbuffers, fastavro, execnet, et-xmlfile, entrypoints, crcmod, beautifulsoup4, bcj-cffi, avro-python3, astunparse, appdirs, zstandard, tldextract, tensorflow, sklearn, seqeval, sacrebleu, rouge-score, rarfile, pytest-xdist, py7zr, openpyxl, mwparserfromhell, lxml, langdetect, jiwer, isort, flake8, elasticsearch, datasets, conllu, bs4, black, bert-score, apache-beam\r\n Running setup.py install for python-Levenshtein ... error\r\n ERROR: Command errored out with exit status 1:\r\n command: 'C:\\ProgramData\\Anaconda3\\envs\\env\\python.exe' -u -c 'import sys, setuptools, tokenize; sys.argv[0] = '\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"'; __file__='\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"';f=getattr(tokenize, '\"'\"'open'\"'\"', open)(__file__);code=f.read().replace('\"'\"'\\r\\n'\"'\"', '\"'\"'\\n'\"'\"');f.close();exec(compile(code, __file__, '\"'\"'exec'\"'\"'))' install --record 'C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-record-v7l7zitb\\install-record.txt' --single-version-externally-managed --compile --install-headers 'C:\\ProgramData\\Anaconda3\\envs\\env\\Include\\python-Levenshtein'\r\n cwd: C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-install-ynt_dbm4\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\r\n Complete output (27 lines):\r\n running install\r\n running build\r\n running build_py\r\n creating build\r\n creating build\\lib.win-amd64-3.7\r\n creating build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\StringMatcher.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n copying Levenshtein\\__init__.py -> build\\lib.win-amd64-3.7\\Levenshtein\r\n running egg_info\r\n writing python_Levenshtein.egg-info\\PKG-INFO\r\n writing dependency_links to python_Levenshtein.egg-info\\dependency_links.txt\r\n writing entry points to python_Levenshtein.egg-info\\entry_points.txt\r\n writing namespace_packages to python_Levenshtein.egg-info\\namespace_packages.txt\r\n writing requirements to python_Levenshtein.egg-info\\requires.txt\r\n writing top-level names to python_Levenshtein.egg-info\\top_level.txt\r\n reading manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n reading manifest template 'MANIFEST.in'\r\n warning: no previously-included files matching '*pyc' found anywhere in distribution\r\n warning: no previously-included files matching '*so' found anywhere in distribution\r\n warning: no previously-included files matching '.project' found anywhere in distribution\r\n warning: no previously-included files matching '.pydevproject' found anywhere in distribution\r\n writing manifest file 'python_Levenshtein.egg-info\\SOURCES.txt'\r\n copying Levenshtein\\_levenshtein.c -> build\\lib.win-amd64-3.7\\Levenshtein\r\n 
copying Levenshtein\\_levenshtein.h -> build\\lib.win-amd64-3.7\\Levenshtein\r\n running build_ext\r\n building 'Levenshtein._levenshtein' extension\r\n error: Microsoft Visual C++ 14.0 or greater is required. Get it with \"Microsoft C++ Build Tools\": https:\/\/visualstudio.microsoft.com\/visual-cpp-build-tools\/\r\n ----------------------------------------\r\nERROR: Command errored out with exit status 1: 'C:\\ProgramData\\Anaconda3\\envs\\env\\python.exe' -u -c 'import sys, setuptools, tokenize; sys.argv[0] = '\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"'; __file__='\"'\"'C:\\\\Users\\\\VKC~1\\\\AppData\\\\Local\\\\Temp\\\\pip-install-ynt_dbm4\\\\python-levenshtein_c02e7e6f9def4629a475349654670ae9\\\\setup.py'\"'\"';f=getattr(tokenize, '\"'\"'open'\"'\"', open)(__file__);code=f.read().replace('\"'\"'\\r\\n'\"'\"', '\"'\"'\\n'\"'\"');f.close();exec(compile(code, __file__, '\"'\"'exec'\"'\"'))' install --record 'C:\\Users\\VKC~1\\AppData\\Local\\Temp\\pip-record-v7l7zitb\\install-record.txt' --single-version-externally-managed --compile --install-headers 'C:\\ProgramData\\Anaconda3\\envs\\env\\Include\\python-Levenshtein' Check the logs for full command output.\r\n```\r\n\r\nHere are conda and python versions:\r\n\r\n```bat\r\n(env) C:\\testing\\datasets>conda --version\r\nconda 4.9.2\r\n\r\n(env) C:\\testing\\datasets>python --version\r\nPython 3.7.10\r\n```\r\n\r\nPlease help me out. Thanks.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2300\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2300","id":873928169,"node_id":"MDU6SXNzdWU4NzM5MjgxNjk=","number":2300,"title":"Add VoxPopuli","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-05-02T12:17:40Z","updated_at":"2021-05-13T10:31:52Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** Voxpopuli\r\n- **Description:** VoxPopuli is raw data is collected from 2009-2020 European Parliament event recordings\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2101.00390\r\n- **Data:** https:\/\/github.com\/facebookresearch\/voxpopuli\r\n- **Motivation:** biggest unlabeled speech dataset\r\n\r\n**Note**: Since the dataset is so huge, we should only add the config `10k` in the beginning.\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2299\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2299","id":873914717,"node_id":"MDU6SXNzdWU4NzM5MTQ3MTc=","number":2299,"title":"My iPhone","user":{"login":"Jasonbuchanan1983","id":82856229,"node_id":"MDQ6VXNlcjgyODU2MjI5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/82856229?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983","html_url":"https:\/\/github.com\/Jasonbuchanan1983","followers_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/followers","following_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/orgs","repos_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/repos","events_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Jasonbuchanan1983\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-02T11:11:11Z","updated_at":"2021-07-23T09:24:16Z","closed_at":"2021-05-03T08:17:38Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found 
[here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2298\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298","id":873771942,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI4NDk2NjM2","number":2298,"title":"Mapping in the distributed setting","user":{"login":"TevenLeScao","id":26709476,"node_id":"MDQ6VXNlcjI2NzA5NDc2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26709476?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TevenLeScao","html_url":"https:\/\/github.com\/TevenLeScao","followers_url":"https:\/\/api.github.com\/users\/TevenLeScao\/followers","following_url":"https:\/\/api.github.com\/users\/TevenLeScao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TevenLeScao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TevenLeScao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TevenLeScao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TevenLeScao\/orgs","repos_url":"https:\/\/api.github.com\/users\/TevenLeScao\/repos","events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TevenLeScao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-05-01T21:23:05Z","updated_at":"2021-05-03T13:54:53Z","closed_at":"2021-05-03T13:54:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2298","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2298.patch"},"body":"The barrier trick for distributed mapping as discussed on Thursday with @lhoestq","performed_via_github_app":null} 
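The "barrier trick" referenced in #2298 above is usually implemented like this: only the main process runs `Dataset.map` (writing its cache file to disk), while the other ranks wait at a `torch.distributed.barrier()` and then issue the same `map` call, which is served from the cache instead of being recomputed. A minimal sketch under that assumption follows; the toy dataset and the `add_length` function are illustrative stand-ins, not code from the PR.

```python
import torch.distributed as dist
from datasets import Dataset

dataset = Dataset.from_dict({"text": ["a short example", "another line"]})

def add_length(batch):
    # Stand-in for a real per-batch processing function (e.g. tokenization).
    return {"length": [len(t) for t in batch["text"]]}

is_distributed = dist.is_available() and dist.is_initialized()
rank = dist.get_rank() if is_distributed else 0

if is_distributed and rank != 0:
    # Non-main ranks wait until rank 0 has finished its map() call.
    dist.barrier()

processed = dataset.map(add_length, batched=True)

if is_distributed and rank == 0:
    # Release the waiting ranks; for datasets backed by files on disk their
    # map() call now loads the cached result instead of recomputing it.
    dist.barrier()

print(processed[0])
```

In a single-process run both barriers are skipped, so the same sketch works with or without `torch.distributed` being initialized.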
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2296\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2296","id":872974907,"node_id":"MDU6SXNzdWU4NzI5NzQ5MDc=","number":2296,"title":"1","user":{"login":"zinnyi","id":82880142,"node_id":"MDQ6VXNlcjgyODgwMTQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/82880142?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/zinnyi","html_url":"https:\/\/github.com\/zinnyi","followers_url":"https:\/\/api.github.com\/users\/zinnyi\/followers","following_url":"https:\/\/api.github.com\/users\/zinnyi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/zinnyi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/zinnyi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/zinnyi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/zinnyi\/orgs","repos_url":"https:\/\/api.github.com\/users\/zinnyi\/repos","events_url":"https:\/\/api.github.com\/users\/zinnyi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/zinnyi\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-30T17:53:49Z","updated_at":"2021-05-03T08:17:31Z","closed_at":"2021-05-03T08:17:31Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\n- **Name:** *name of the dataset*\n- **Description:** *short description of the dataset (or link to social media or blog post)*\n- **Paper:** *link to the dataset paper if available*\n- **Data:** *link to the Github repository or current dataset location*\n- **Motivation:** *what are some good reasons to have this dataset*\n\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2295\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295","id":872902867,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI3NzY0NDk3","number":2295,"title":"Create 
ExtractManager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/6","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/6\/labels","id":6836458,"node_id":"MDk6TWlsZXN0b25lNjgzNjQ1OA==","number":6,"title":"1.10","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":29,"state":"closed","created_at":"2021-06-08T18:48:33Z","updated_at":"2021-07-21T15:36:49Z","due_on":"2021-08-05T07:00:00Z","closed_at":"2021-07-21T15:36:49Z"},"comments":2,"created_at":"2021-04-30T17:13:34Z","updated_at":"2021-07-12T14:12:03Z","closed_at":"2021-07-08T08:11:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2295","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2295.patch"},"body":"Perform 
refactoring to decouple extract functionality.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2294\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2294","id":872136075,"node_id":"MDU6SXNzdWU4NzIxMzYwNzU=","number":2294,"title":"Slow #0 when using map to tokenize.","user":{"login":"VerdureChen","id":31714566,"node_id":"MDQ6VXNlcjMxNzE0NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31714566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VerdureChen","html_url":"https:\/\/github.com\/VerdureChen","followers_url":"https:\/\/api.github.com\/users\/VerdureChen\/followers","following_url":"https:\/\/api.github.com\/users\/VerdureChen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VerdureChen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VerdureChen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VerdureChen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VerdureChen\/orgs","repos_url":"https:\/\/api.github.com\/users\/VerdureChen\/repos","events_url":"https:\/\/api.github.com\/users\/VerdureChen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VerdureChen\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-30T08:00:33Z","updated_at":"2021-05-04T11:00:11Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, _datasets_ is really amazing! I am following [run_mlm_no_trainer.py](url) to pre-train BERT, and it uses `tokenized_datasets = raw_datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=args.preprocessing_num_workers,\r\n remove_columns=column_names,\r\n load_from_cache_file=not args.overwrite_cache,\r\n )` to tokenize by multiprocessing. However, I have found that when `num_proc`>1\uff0cthe process _#0_ is much slower than others.\r\nIt looks like this:\r\n![image](https:\/\/user-images.githubusercontent.com\/31714566\/116665555-81246280-a9cc-11eb-8a37-6e608ab310d0.png)\r\nIt takes more than 12 hours for #0, while others just about half an hour. 
Could anyone tell me it is normal or not, and is there any methods to speed up it?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2293\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293","id":872079385,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI3MDQzNzQ3","number":2293,"title":"imdb dataset from Don't Stop Pretraining Paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-30T06:40:48Z","updated_at":"2021-04-30T06:54:25Z","closed_at":"2021-04-30T06:54:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2293","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2293.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2292\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292","id":871230183,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MjgzNTYy","number":2292,"title":"Fixed typo 
seperate->separate","user":{"login":"laksh9950","id":32505743,"node_id":"MDQ6VXNlcjMyNTA1NzQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32505743?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/laksh9950","html_url":"https:\/\/github.com\/laksh9950","followers_url":"https:\/\/api.github.com\/users\/laksh9950\/followers","following_url":"https:\/\/api.github.com\/users\/laksh9950\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/laksh9950\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/laksh9950\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/laksh9950\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/laksh9950\/orgs","repos_url":"https:\/\/api.github.com\/users\/laksh9950\/repos","events_url":"https:\/\/api.github.com\/users\/laksh9950\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/laksh9950\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T16:40:53Z","updated_at":"2021-04-30T13:29:18Z","closed_at":"2021-04-30T13:03:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2292","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2292.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2291\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291","id":871216757,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MjcyNzE5","number":2291,"title":"Don't copy recordbatches in memory during a table 
deepcopy","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T16:26:05Z","updated_at":"2021-04-29T16:34:35Z","closed_at":"2021-04-29T16:34:34Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2291","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2291.patch"},"body":"Fix issue #2276 and hopefully #2134\r\n\r\nThe recordbatches of the `IndexedTableMixin` used to speed up queries to the table were copied in memory during a table deepcopy.\r\nThis resulted in `concatenate_datasets`, `load_from_disk` and other methods to always bring the data in memory.\r\n\r\nI fixed the copy similarly to #2287 and updated the test to make sure it doesn't happen again (added a test for deepcopy + make sure that the immutable arrow objects are passed to the copied table without being copied).\r\n\r\nThe issue was not caught by our tests because the total allocated bytes value in PyArrow isn't updated when deepcopying recordbatches: the copy in memory wasn't detected. 
This behavior looks like a bug in PyArrow, I'll open a ticket on JIRA.\r\n\r\nThanks @samsontmr , @TaskManager91 and @mariosasko for the help\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2290\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290","id":871145817,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MjEyNTIz","number":2290,"title":"Bbaw egyptian","user":{"login":"phiwi","id":54144149,"node_id":"MDQ6VXNlcjU0MTQ0MTQ5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54144149?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/phiwi","html_url":"https:\/\/github.com\/phiwi","followers_url":"https:\/\/api.github.com\/users\/phiwi\/followers","following_url":"https:\/\/api.github.com\/users\/phiwi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/phiwi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/phiwi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/phiwi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/phiwi\/orgs","repos_url":"https:\/\/api.github.com\/users\/phiwi\/repos","events_url":"https:\/\/api.github.com\/users\/phiwi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/phiwi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-04-29T15:27:58Z","updated_at":"2021-05-06T17:25:25Z","closed_at":"2021-05-06T17:25:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2290","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2290.patch"},"body":"This is the \"hieroglyph corpus\" that I could unfortunately not contribute during the marathon. I re-extracted it again now, so that it is in the state as used in my paper (seee documentation). 
I hope it satiesfies your requirements and wish every scientist out their loads of fun deciphering a 5.000 years old language :-)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2289\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289","id":871118573,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MTg5MDU3","number":2289,"title":"Allow collaborators to self-assign issues","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-29T15:07:06Z","updated_at":"2021-04-30T18:28:16Z","closed_at":"2021-04-30T18:28:16Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2289","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2289.patch"},"body":"Allow collaborators (without write access to the repository) to self-assign issues.\r\n\r\nIn order to self-assign an issue, they have to comment it with the word: `#take` or `#self-assign`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2288\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2288","id":871111235,"node_id":"MDU6SXNzdWU4NzExMTEyMzU=","number":2288,"title":"Load_dataset for local CSV 
files","user":{"login":"sstojanoska","id":17052700,"node_id":"MDQ6VXNlcjE3MDUyNzAw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17052700?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sstojanoska","html_url":"https:\/\/github.com\/sstojanoska","followers_url":"https:\/\/api.github.com\/users\/sstojanoska\/followers","following_url":"https:\/\/api.github.com\/users\/sstojanoska\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sstojanoska\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sstojanoska\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sstojanoska\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sstojanoska\/orgs","repos_url":"https:\/\/api.github.com\/users\/sstojanoska\/repos","events_url":"https:\/\/api.github.com\/users\/sstojanoska\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sstojanoska\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-29T15:01:10Z","updated_at":"2021-06-15T13:49:26Z","closed_at":"2021-06-15T13:49:26Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"The method load_dataset fails to correctly load a dataset from csv. \r\n\r\nMoreover, I am working on a token-classification task ( POS tagging) , where each row in my CSV contains two columns each of them having a list of strings.\r\nrow example:\r\n```tokens | labels\r\n['I' , 'am', 'John'] | ['PRON', 'AUX', 'PROPN' ] \r\n```\r\nThe method, loads each list as a string: (i.g \"['I' , 'am', 'John']\").\r\nTo solve this issue, I copied the Datasets.Features, created Sequence types ( instead of Value) and tried to cast the features type\r\n```\r\nnew_features['tokens'] = Sequence(feature=Value(dtype='string', id=None))\r\nnew_features['labels'] = Sequence(feature=ClassLabel(num_classes=len(tag2idx), names=list(unique_tags)))\r\ndataset = dataset.cast(new_features)\r\n```\r\nbut I got the following error \r\n```\r\nArrowNotImplementedError: Unsupported cast from string to list using function cast_list\r\n```\r\nMoreover, I tried to set feature parameter in load_dataset method, to my new_features, but this fails as well.\r\nHow can this be solved ?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2287\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287","id":871063374,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MTQ0MTQ3","number":2287,"title":"Avoid copying table's record 
batches","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-29T14:15:01Z","updated_at":"2021-04-29T16:34:23Z","closed_at":"2021-04-29T16:34:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2287","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2287.patch"},"body":"Fixes #2276","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2286\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286","id":871032393,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MTE5MTE2","number":2286,"title":"Fix metadata validation with config 
names","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T13:44:32Z","updated_at":"2021-04-29T14:07:29Z","closed_at":"2021-04-29T14:07:28Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2286","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2286.patch"},"body":"I noticed in https:\/\/github.com\/huggingface\/datasets\/pull\/2280 that the metadata validator doesn't parse the tags in the readme properly when then contain the tags per config.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2285\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2285","id":871005236,"node_id":"MDU6SXNzdWU4NzEwMDUyMzY=","number":2285,"title":"Help understanding how to build a dataset for language modeling as with the old 
TextDataset","user":{"login":"danieldiezmallo","id":46021411,"node_id":"MDQ6VXNlcjQ2MDIxNDEx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46021411?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danieldiezmallo","html_url":"https:\/\/github.com\/danieldiezmallo","followers_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/followers","following_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/orgs","repos_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/repos","events_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danieldiezmallo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-29T13:16:45Z","updated_at":"2021-05-19T07:22:45Z","closed_at":"2021-05-19T07:22:39Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello,\r\n\r\nI am trying to load a custom dataset that I will then use for language modeling. The dataset consists of a text file that has a whole document in each line, meaning that each line overpasses the normal 512 tokens limit of most tokenizers.\r\n\r\nI would like to understand what is the process to build a text dataset that tokenizes each line, having previously split the documents in the dataset into lines of a \"tokenizable\" size, as the old TextDataset class would do, where you only had to do the following, and a tokenized dataset without text loss would be available to pass to a DataCollator:\r\n\r\n```\r\nmodel_checkpoint = 'distilbert-base-uncased'\r\n\r\nfrom transformers import AutoTokenizer\r\ntokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\r\n\r\nfrom transformers import TextDataset\r\n\r\ndataset = TextDataset(\r\n tokenizer=tokenizer,\r\n file_path=\"path\/to\/text_file.txt\",\r\n block_size=512,\r\n)\r\n```\r\n\r\nFor now, what I have is the following, which, of course, throws an error because each line is longer than the maximum block size in the tokenizer:\r\n\r\n```\r\nimport datasets\r\ndataset = datasets.load_dataset('path\/to\/text_file.txt')\r\n\r\nmodel_checkpoint = 'distilbert-base-uncased'\r\ntokenizer = AutoTokenizer.from_pretrained(model_checkpoint)\r\n\r\ndef tokenize_function(examples):\r\n return tokenizer(examples[\"text\"])\r\n\r\ntokenized_datasets = dataset.map(tokenize_function, batched=True, num_proc=4, remove_columns=[\"text\"])\r\n\r\ntokenized_datasets\r\n```\r\n\r\nSo what would be the \"standard\" way of creating a dataset in the way it was done before?\r\n\r\nThank you very much for the help :))","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2284\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284","id":870932710,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MDM5MDc5","number":2284,"title":"Initialize Imdb dataset as used in Don't Stop Pretraining Paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T11:52:38Z","updated_at":"2021-04-29T12:54:34Z","closed_at":"2021-04-29T12:54:34Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2284","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2284.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2283\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283","id":870926475,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MDM0MDk5","number":2283,"title":"Initialize imdb dataset from don't stop pretraining 
paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T11:44:54Z","updated_at":"2021-04-29T11:50:24Z","closed_at":"2021-04-29T11:50:24Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2283","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2283.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2282\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282","id":870900332,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI2MDEyMzM3","number":2282,"title":"Initialize imdb dataset from don't stop pretraining 
paper","user":{"login":"BobbyManion","id":52530809,"node_id":"MDQ6VXNlcjUyNTMwODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/52530809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BobbyManion","html_url":"https:\/\/github.com\/BobbyManion","followers_url":"https:\/\/api.github.com\/users\/BobbyManion\/followers","following_url":"https:\/\/api.github.com\/users\/BobbyManion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BobbyManion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BobbyManion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BobbyManion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BobbyManion\/orgs","repos_url":"https:\/\/api.github.com\/users\/BobbyManion\/repos","events_url":"https:\/\/api.github.com\/users\/BobbyManion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BobbyManion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T11:17:56Z","updated_at":"2021-04-29T11:43:51Z","closed_at":"2021-04-29T11:43:51Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2282","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2282.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2281\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281","id":870792784,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI1OTI2MjAw","number":2281,"title":"Update multi_woz_v22 
checksum","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-29T09:09:11Z","updated_at":"2021-04-29T13:41:35Z","closed_at":"2021-04-29T13:41:34Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2281","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2281.patch"},"body":"Fix issue https:\/\/github.com\/huggingface\/datasets\/issues\/1876\r\nThe files were changed in https:\/\/github.com\/budzianowski\/multiwoz\/pull\/72","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2280\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280","id":870780431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI1OTE2Mzcy","number":2280,"title":"Fixed typo 
seperate->separate","user":{"login":"laksh9950","id":32505743,"node_id":"MDQ6VXNlcjMyNTA1NzQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32505743?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/laksh9950","html_url":"https:\/\/github.com\/laksh9950","followers_url":"https:\/\/api.github.com\/users\/laksh9950\/followers","following_url":"https:\/\/api.github.com\/users\/laksh9950\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/laksh9950\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/laksh9950\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/laksh9950\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/laksh9950\/orgs","repos_url":"https:\/\/api.github.com\/users\/laksh9950\/repos","events_url":"https:\/\/api.github.com\/users\/laksh9950\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/laksh9950\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-29T08:55:46Z","updated_at":"2021-04-29T16:41:22Z","closed_at":"2021-04-29T16:41:16Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2280","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2280.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2279\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2279","id":870431662,"node_id":"MDU6SXNzdWU4NzA0MzE2NjI=","number":2279,"title":"Compatibility with Ubuntu 18 and GLIBC 2.27?","user":{"login":"tginart","id":11379648,"node_id":"MDQ6VXNlcjExMzc5NjQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11379648?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/tginart","html_url":"https:\/\/github.com\/tginart","followers_url":"https:\/\/api.github.com\/users\/tginart\/followers","following_url":"https:\/\/api.github.com\/users\/tginart\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/tginart\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/tginart\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/tginart\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/tginart\/orgs","repos_url":"https:\/\/api.github.com\/users\/tginart\/repos","events_url":"https:\/\/api.github.com\/users\/tginart\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/tginart\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-28T22:08:07Z","updated_at":"2021-04-29T07:42:42Z","closed_at":"2021-04-29T07:42:42Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nFor use on Ubuntu systems, it seems that datasets requires GLIBC 2.29. However, Ubuntu 18 runs with GLIBC 2.27 and it seems [non-trivial to upgrade GLIBC to 2.29 for Ubuntu 18 users](https:\/\/www.digitalocean.com\/community\/questions\/how-install-glibc-2-29-or-higher-in-ubuntu-18-04). \r\n\r\nI'm not sure if there is anything that can be done about this, but I'd like to confirm that using huggingface\/datasets requires either an upgrade to Ubuntu 19\/20 or a hand-rolled install of a higher version of GLIBC.\r\n\r\n## Steps to reproduce the bug\r\n1. clone the transformers repo\r\n2. move to examples\/pytorch\/language-modeling\r\n3. run example command:\r\n```python run_clm.py --model_name_or_path gpt2 --dataset_name wikitext --dataset_config_name wikitext-2-raw-v1 --do_train --do_eval --output_dir \/tmp\/test-clm```\r\n\r\n\r\n## Expected results\r\nAs described in the transformers repo.\r\n\r\n## Actual results\r\n```Traceback (most recent call last):\r\n File \"run_clm.py\", line 34, in \r\n from transformers import (\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/__init__.py\", line 2487, in __getattr__\r\n return super().__getattr__(name)\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/file_utils.py\", line 1699, in __getattr__\r\n module = self._get_module(self._class_to_module[name])\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/__init__.py\", line 2481, in _get_module\r\n return importlib.import_module(\".\" + module_name, self.__name__)\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/importlib\/__init__.py\", line 127, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/__init__.py\", line 19, in \r\n from . 
import (\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/layoutlm\/__init__.py\", line 23, in \r\n from .tokenization_layoutlm import LayoutLMTokenizer\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/layoutlm\/tokenization_layoutlm.py\", line 19, in \r\n from ..bert.tokenization_bert import BertTokenizer\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/models\/bert\/tokenization_bert.py\", line 23, in \r\n from ...tokenization_utils import PreTrainedTokenizer, _is_control, _is_punctuation, _is_whitespace\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils.py\", line 26, in \r\n from .tokenization_utils_base import (\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 68, in \r\n from tokenizers import AddedToken\r\n File \"\/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/tokenizers\/__init__.py\", line 79, in \r\n from .tokenizers import (\r\nImportError: \/lib\/x86_64-linux-gnu\/libm.so.6: version `GLIBC_2.29' not found (required by \/home\/tginart\/anaconda3\/envs\/huggingface\/lib\/python3.7\/site-packages\/tokenizers\/tokenizers.cpython-37m-x86_64-linux-gnu.so)\r\n```\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```\r\n- Datasets: 1.6.1\r\n- Python: 3.7.10 (default, Feb 26 2021, 18:47:35) \r\n[GCC 7.3.0]\r\n- Platform: Linux-4.15.0-128-generic-x86_64-with-debian-buster-sid\r\n\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2278\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2278","id":870088059,"node_id":"MDU6SXNzdWU4NzAwODgwNTk=","number":2278,"title":"Loss result 
inGptNeoForCasual","user":{"login":"Yossillamm","id":51174606,"node_id":"MDQ6VXNlcjUxMTc0NjA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/51174606?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Yossillamm","html_url":"https:\/\/github.com\/Yossillamm","followers_url":"https:\/\/api.github.com\/users\/Yossillamm\/followers","following_url":"https:\/\/api.github.com\/users\/Yossillamm\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Yossillamm\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Yossillamm\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Yossillamm\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Yossillamm\/orgs","repos_url":"https:\/\/api.github.com\/users\/Yossillamm\/repos","events_url":"https:\/\/api.github.com\/users\/Yossillamm\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Yossillamm\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-28T15:39:52Z","updated_at":"2021-05-06T16:14:23Z","closed_at":"2021-05-06T16:14:23Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Is there any way you give the \" loss\" and \"logits\" results in the gpt neo api? ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2277\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277","id":870071994,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI1MzI5NjIz","number":2277,"title":"Create 
CacheManager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":5,"closed_issues":2,"state":"open","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-08-04T17:03:52Z","due_on":"2021-08-21T07:00:00Z","closed_at":null},"comments":0,"created_at":"2021-04-28T15:23:42Z","updated_at":"2021-07-21T15:36:05Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2277","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2277.patch"},"body":"Perform refactoring to decouple cache functionality 
(method `as_dataset`).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2276\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2276","id":870010511,"node_id":"MDU6SXNzdWU4NzAwMTA1MTE=","number":2276,"title":"concatenate_datasets loads all the data into memory","user":{"login":"TaskManager91","id":7063207,"node_id":"MDQ6VXNlcjcwNjMyMDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7063207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TaskManager91","html_url":"https:\/\/github.com\/TaskManager91","followers_url":"https:\/\/api.github.com\/users\/TaskManager91\/followers","following_url":"https:\/\/api.github.com\/users\/TaskManager91\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TaskManager91\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TaskManager91\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TaskManager91\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TaskManager91\/orgs","repos_url":"https:\/\/api.github.com\/users\/TaskManager91\/repos","events_url":"https:\/\/api.github.com\/users\/TaskManager91\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TaskManager91\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2021-04-28T14:27:21Z","updated_at":"2021-05-03T08:41:55Z","closed_at":"2021-05-03T08:41:55Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nWhen I try to concatenate 2 datasets (10GB each) , the entire data is loaded into memory instead of being written directly to disk.\r\n\r\nInterestingly, this happens when trying to save the new dataset to disk or concatenating it again.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/7063207\/116420321-2b21b480-a83e-11eb-9006-8f6ca729fb6f.png)\r\n\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import concatenate_datasets, load_from_disk\r\n\r\ntest_sampled_pro = load_from_disk(\"test_sampled_pro\")\r\nval_sampled_pro = load_from_disk(\"val_sampled_pro\")\r\n\r\nbig_set = concatenate_datasets([test_sampled_pro, val_sampled_pro])\r\n\r\n# Loaded to memory\r\nbig_set.save_to_disk(\"big_set\")\r\n\r\n# Loaded to memory\r\nbig_set = concatenate_datasets([big_set, val_sampled_pro])\r\n```\r\n\r\n## Expected results\r\nThe data should be loaded into memory in batches and then saved directly to disk.\r\n\r\n## Actual results\r\nThe entire data set is loaded into the memory and then saved to the hard disk.\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\n- Datasets: 1.6.1\r\n- Python: 3.8.8 (default, Apr 13 2021, 19:58:26) \r\n[GCC 7.3.0]\r\n- Platform: Linux-5.4.72-microsoft-standard-WSL2-x86_64-with-glibc2.10\r\n```\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2275\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2275","id":869378311,"node_id":"MDU6SXNzdWU4NjkzNzgzMTE=","number":2275,"title":"SNLI dataset has labels of -1 ","user":{"login":"puzzler10","id":17426779,"node_id":"MDQ6VXNlcjE3NDI2Nzc5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17426779?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/puzzler10","html_url":"https:\/\/github.com\/puzzler10","followers_url":"https:\/\/api.github.com\/users\/puzzler10\/followers","following_url":"https:\/\/api.github.com\/users\/puzzler10\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/puzzler10\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/puzzler10\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/puzzler10\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/puzzler10\/orgs","repos_url":"https:\/\/api.github.com\/users\/puzzler10\/repos","events_url":"https:\/\/api.github.com\/users\/puzzler10\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/puzzler10\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-28T00:32:25Z","updated_at":"2021-05-17T13:34:18Z","closed_at":"2021-05-17T13:34:18Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"There are a number of rows with a label of -1 in the SNLI dataset. The dataset descriptions [here](https:\/\/nlp.stanford.edu\/projects\/snli\/) and [here](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/snli) don't list -1 as a label possibility, and neither does the dataset viewer. As examples, see index 107 or 124 of the test set.\r\n\r\nIt isn't clear what these labels mean. I found a [line of code](https:\/\/github.com\/huggingface\/datasets\/blob\/80e59ef178d3bb2090d091bc32315c655eb0633d\/datasets\/snli\/snli.py#L94) that seems to put them in but it seems still unclear why they are there. The current workaround is to just drop the rows from any model being trained. 
\r\n\r\nPerhaps the documentation should be updated.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2274\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274","id":869186276,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0NTkyMjQx","number":2274,"title":"Always update metadata in arrow schema","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-27T19:21:57Z","updated_at":"2021-04-29T09:57:51Z","closed_at":"2021-04-29T09:57:50Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2274","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2274.patch"},"body":"We store a redundant copy of the features in the metadata of the schema of the arrow table. This is used to recover the features when doing `Dataset.from_file`. 
These metadata are updated after each transfor, that changes the feature types.\r\n\r\nFor each function that transforms the feature types of the dataset, I added a step in the tests to make sure the metadata in the arrow schema are up to date.\r\n\r\nI also added a line to update the metadata directly in the Dataset.__init__ method.\r\nThis way even a dataset instantiated with __init__ will have a table with the right metadata.\r\n\r\ncc @mariosasko ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2273\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273","id":869046290,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0NDcxODc1","number":2273,"title":"Added CUAD metrics","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-27T16:49:12Z","updated_at":"2021-04-29T13:59:47Z","closed_at":"2021-04-29T13:59:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2273","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2273.patch"},"body":"`EM`, `F1`, `AUPR`, `Precision@80%Recall`, and `Precision@90%Recall` metrics supported for CUAD","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2272\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2272","id":869017977,"node_id":"MDU6SXNzdWU4NjkwMTc5Nzc=","number":2272,"title":"Bug in 
Dataset.class_encode_column","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-27T16:13:18Z","updated_at":"2021-04-30T12:54:27Z","closed_at":"2021-04-30T12:54:27Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nAll the rest of the columns except the one passed to `Dataset.class_encode_column` are discarded.\r\n\r\n## Expected results\r\n\r\nAll the original columns should be kept.\r\n\r\nThis needs regression tests.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2271\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2271","id":869002141,"node_id":"MDU6SXNzdWU4NjkwMDIxNDE=","number":2271,"title":"Synchronize table metadata with 
features","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-27T15:55:13Z","updated_at":"2021-04-28T12:48:25Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"**Is your feature request related to a problem? Please describe.**\r\n\r\nAs pointed out in this [comment](https:\/\/github.com\/huggingface\/datasets\/pull\/2145#discussion_r621326767):\r\n> Metadata stored in the schema is just a redundant information regarding the feature types.\r\nIt is used when calling Dataset.from_file to know which feature types to use.\r\nThese metadata are stored in the schema of the pyarrow table by using `update_metadata_with_features`.\r\nHowever this something that's almost never tested properly.\r\n\r\n**Describe the solution you'd like**\r\n\r\nWe should find a way to always make sure that the metadata (in `self.data.schema.metadata`) are synced with the actual feature types (in `self.info.features`).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2270\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270","id":868913660,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0MzU5Njky","number":2270,"title":"Fix iterable interface expected by 
numpy","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-27T14:35:56Z","updated_at":"2021-04-28T17:39:27Z","closed_at":"2021-04-28T17:39:27Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2270","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2270.patch"},"body":"Numpy expects the old iterable interface with `__getitem__` instead of `__iter__`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2269\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269","id":868878468,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0MzMwNDA3","number":2269,"title":"Fix query table with 
iterable","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-27T13:59:38Z","updated_at":"2021-04-27T14:21:57Z","closed_at":"2021-04-27T14:21:56Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2269","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2269.patch"},"body":"The benchmark runs are failing on master because it tries to use an iterable to query the dataset.\r\nHowever there's currently an issue caused by the use of `np.array` instead of `np.fromiter` on the iterable.\r\nThis PR fixes it","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2268\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268","id":868773380,"node_id":"MDExOlB1bGxSZXF1ZXN0NjI0MjQyODg1","number":2268,"title":"Don't use pyarrow 4.0.0 since it segfaults when casting a sliced ListArray of 
integers","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-27T11:58:28Z","updated_at":"2021-06-12T12:44:49Z","closed_at":"2021-04-27T13:43:20Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2268","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2268.patch"},"body":"This test `tests\/test_table.py::test_concatenation_table_cast` segfaults with the latest update of pyarrow 4.0.0.\r\nSetting `pyarrow<4.0.0` for now. I'll open an issue on JIRA once I know more about the origin of the issue","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2267\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2267","id":868291129,"node_id":"MDU6SXNzdWU4NjgyOTExMjk=","number":2267,"title":"DatasetDict save load Failing test in 1.6 not in 
1.5","user":{"login":"timothyjlaurent","id":2000204,"node_id":"MDQ6VXNlcjIwMDAyMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2000204?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/timothyjlaurent","html_url":"https:\/\/github.com\/timothyjlaurent","followers_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/followers","following_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/orgs","repos_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/repos","events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/timothyjlaurent\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-04-27T00:03:25Z","updated_at":"2021-05-28T15:27:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWe have a test that saves a DatasetDict to disk and then loads it from disk. In 1.6 there is an incompatibility in the schema.\r\n\r\n\r\n\r\n\r\nDowngrading to `>1.6` -- fixes the problem.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\n\r\n### Load a dataset dict from jsonl \r\n\r\npath = '\/test\/foo'\r\n\r\nds_dict.save_to_disk(path)\r\n\r\nds_from_disk = DatasetDict.load_from_disk(path). ## <-- this is where I see the error on 1.6\r\n```\r\n\r\n## Expected results\r\n\r\nUpgrading to 1.6 shouldn't break that test. 
We should be able to serialize to and from disk.\r\n\r\n## Actual results\r\n```\r\n # Infer features if None\r\n inferred_features = Features.from_arrow_schema(arrow_table.schema)\r\n if self.info.features is None:\r\n self.info.features = inferred_features\r\n \r\n # Infer fingerprint if None\r\n \r\n if self._fingerprint is None:\r\n self._fingerprint = generate_fingerprint(self)\r\n \r\n # Sanity checks\r\n \r\n assert self.features is not None, \"Features can't be None in a Dataset object\"\r\n assert self._fingerprint is not None, \"Fingerprint can't be None in a Dataset object\"\r\n if self.info.features.type != inferred_features.type:\r\n> raise ValueError(\r\n \"External features info don't match the dataset:\\nGot\\n{}\\nwith type\\n{}\\n\\nbut expected something like\\n{}\\nwith type\\n{}\".format(\r\n self.info.features, self.info.features.type, inferred_features, inferred_features.type\r\n )\r\n )\r\nE ValueError: External features info don't match the dataset:\r\nE Got\r\nE {'_input_hash': Value(dtype='int64', id=None), '_task_hash': Value(dtype='int64', id=None), '_view_id': Value(dtype='string', id=None), 'answer': Value(dtype='string', id=None), 'encoding__ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'encoding__offsets': Sequence(feature=Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), length=-1, id=None), 'encoding__overflowing': Sequence(feature=Value(dtype='null', id=None), length=-1, id=None), 'encoding__tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'encoding__words': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_labels': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'relations': [{'child': Value(dtype='int64', id=None), 'child_span': {'end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None)}, 'color': Value(dtype='string', id=None), 'head': Value(dtype='int64', id=None), 'head_span': {'end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None)}, 'label': Value(dtype='string', id=None)}], 'spans': [{'end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None), 'token_end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'type': Value(dtype='string', id=None)}], 'text': Value(dtype='string', id=None), 'tokens': [{'disabled': Value(dtype='bool', id=None), 'end': Value(dtype='int64', id=None), 'id': Value(dtype='int64', id=None), 'start': Value(dtype='int64', id=None), 'text': Value(dtype='string', id=None), 'ws': Value(dtype='bool', id=None)}]}\r\nE with type\r\nE struct<_input_hash: int64, _task_hash: int64, _view_id: string, answer: string, encoding__ids: list, encoding__offsets: list>, encoding__overflowing: list, encoding__tokens: list, encoding__words: list, ner_ids: list, ner_labels: list, relations: list, color: string, head: int64, head_span: struct, label: string>>, spans: list>, text: string, tokens: list>>\r\nE \r\nE but expected something like\r\nE {'_input_hash': Value(dtype='int64', id=None), '_task_hash': Value(dtype='int64', 
id=None), '_view_id': Value(dtype='string', id=None), 'answer': Value(dtype='string', id=None), 'encoding__ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'encoding__offsets': Sequence(feature=Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), length=-1, id=None), 'encoding__overflowing': Sequence(feature=Value(dtype='null', id=None), length=-1, id=None), 'encoding__tokens': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'encoding__words': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_ids': Sequence(feature=Value(dtype='int64', id=None), length=-1, id=None), 'ner_labels': Sequence(feature=Value(dtype='string', id=None), length=-1, id=None), 'relations': [{'head': Value(dtype='int64', id=None), 'child': Value(dtype='int64', id=None), 'head_span': {'start': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None)}, 'child_span': {'start': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'label': Value(dtype='string', id=None)}, 'color': Value(dtype='string', id=None), 'label': Value(dtype='string', id=None)}], 'spans': [{'text': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'token_start': Value(dtype='int64', id=None), 'token_end': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'type': Value(dtype='string', id=None), 'label': Value(dtype='string', id=None)}], 'text': Value(dtype='string', id=None), 'tokens': [{'text': Value(dtype='string', id=None), 'start': Value(dtype='int64', id=None), 'end': Value(dtype='int64', id=None), 'id': Value(dtype='int64', id=None), 'ws': Value(dtype='bool', id=None), 'disabled': Value(dtype='bool', id=None)}]}\r\nE with type\r\nE struct<_input_hash: int64, _task_hash: int64, _view_id: string, answer: string, encoding__ids: list, encoding__offsets: list>, encoding__overflowing: list, encoding__tokens: list, encoding__words: list, ner_ids: list, ner_labels: list, relations: list, child_span: struct, color: string, label: string>>, spans: list>, text: string, tokens: list>>\r\n\r\n..\/..\/..\/..\/..\/.virtualenvs\/tf_ner_rel_lib\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py:274: ValueError\r\n```\r\n## Versions\r\n- Datasets: 1.6.1\r\n- Python: 3.8.5 (default, Jan 26 2021, 10:01:04) \r\n[Clang 12.0.0 (clang-1200.0.32.2)]\r\n- Platform: macOS-10.15.7-x86_64-i386-64bit\r\n\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2266\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266","id":867864353,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzNDY1OTI5","number":2266,"title":"Make tests run 
faster","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-26T15:55:40Z","updated_at":"2021-04-29T10:00:13Z","closed_at":"2021-04-29T10:00:04Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2266","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2266.patch"},"body":"From 7min to 2min to run pytest.\r\nIdeally we should keep the whole CI run time below 10min.\r\n\r\nIn this PR I removed the remote tests that were never used.\r\nI also replaced nested parametrized tests with unit tests.\r\nThis makes me think that we could still add more high level tests to check for a few combinations of parameters (but not all of them since there are too many of them).\r\nLet me know what you think\r\n\r\nFinally in another PR we can also separate in two circleci jobs:\r\n- the tests of the code code of the lib\r\n- the tests of the all the dataset\/metric scripts.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2265\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265","id":867490646,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzMTUyOTg5","number":2265,"title":"Update 
black","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-26T09:35:09Z","updated_at":"2021-04-26T09:47:48Z","closed_at":"2021-04-26T09:47:47Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2265","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2265.patch"},"body":"Latest black version 21.4b0 requires to reformat most dataset scripts and also the core code of the lib.\r\nThis makes the CI currently fail on master","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2264\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264","id":867476228,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzMTQwODA1","number":2264,"title":"Fix memory issue in multiprocessing: Don't pickle table 
index","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-26T09:21:35Z","updated_at":"2021-04-26T10:30:28Z","closed_at":"2021-04-26T10:08:14Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2264","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2264.patch"},"body":"The table index is currently being pickled when doing multiprocessing, which brings all the record batches of the dataset in memory.\r\n\r\nI fixed that by not pickling the index attributes. Therefore each process has to rebuild the index when unpickling the table.\r\n\r\nFix issue #2256\r\n\r\nWe'll do a patch release asap !","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2263\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263","id":867420912,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIzMDk0NTcy","number":2263,"title":"test data added, dataset_infos 
updated","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-26T08:27:18Z","updated_at":"2021-04-29T09:30:21Z","closed_at":"2021-04-29T09:30:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2263","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2263.patch"},"body":"Fixes #2262. Thanks for pointing out issue with dataset @jinmang2!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2262\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2262","id":867325351,"node_id":"MDU6SXNzdWU4NjczMjUzNTE=","number":2262,"title":"NewsPH NLI dataset script fails to access test data.","user":{"login":"jinmang2","id":37775784,"node_id":"MDQ6VXNlcjM3Nzc1Nzg0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37775784?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jinmang2","html_url":"https:\/\/github.com\/jinmang2","followers_url":"https:\/\/api.github.com\/users\/jinmang2\/followers","following_url":"https:\/\/api.github.com\/users\/jinmang2\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jinmang2\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jinmang2\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jinmang2\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jinmang2\/orgs","repos_url":"https:\/\/api.github.com\/users\/jinmang2\/repos","events_url":"https:\/\/api.github.com\/users\/jinmang2\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jinmang2\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset 
bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-26T06:44:41Z","updated_at":"2021-04-29T09:32:03Z","closed_at":"2021-04-29T09:30:20Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"In Newsph-NLI Dataset (#1192), it fails to access test data.\r\n\r\nAccording to the script below, the download manager will download the train data when trying to download the test data. \r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/2a2dd6316af2cc7fdf24e4779312e8ee0c7ed98b\/datasets\/newsph_nli\/newsph_nli.py#L71\r\n\r\nIf you download it according to the script above, you can see that train and test receive the same data as shown below.\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> newsph_nli = load_dataset(path=\".\/datasets\/newsph_nli.py\")\r\n>>> newsph_nli\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 420000\r\n })\r\n test: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 420000\r\n })\r\n validation: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 90000\r\n })\r\n})\r\n>>> newsph_nli[\"train\"][0]\r\n{'hypothesis': 'Ito ang dineklara ni Atty. Romulo Macalintal, abogado ni Robredo, kaugnay ng pagsisimula ng preliminary conference ngayong hapon sa Presidential Electoral Tribunal (PET).',\r\n 'label': 1,\r\n 'premise': '\"Hindi ko ugali ang mamulitika; mas gusto kong tahimik na magtrabaho. Pero sasabihin ko ito ngayon: ang tapang, lakas, at diskarte, hindi nadadaan sa mapanirang salita. Ang kailangan ng taumbayan ay tapang sa gawa,\" ayon kay Robredo sa inilabas nitong statement.'}\r\n>>> newsph_nli[\"test\"][0]\r\n{'hypothesis': 'Ito ang dineklara ni Atty. Romulo Macalintal, abogado ni Robredo, kaugnay ng pagsisimula ng preliminary conference ngayong hapon sa Presidential Electoral Tribunal (PET).',\r\n 'label': 1,\r\n 'premise': '\"Hindi ko ugali ang mamulitika; mas gusto kong tahimik na magtrabaho. Pero sasabihin ko ito ngayon: ang tapang, lakas, at diskarte, hindi nadadaan sa mapanirang salita. Ang kailangan ng taumbayan ay tapang sa gawa,\" ayon kay Robredo sa inilabas nitong statement.'}\r\n```\r\n\r\nIn local, I modified the code of the source as below and got the correct result.\r\n```python\r\n71 test_path = os.path.join(download_path, \"test.csv\") \r\n```\r\n```python\r\n>>> from datasets import load_dataset\r\n>>> newsph_nli = load_dataset(path=\".\/datasets\/newsph_nli.py\")\r\n>>> newsph_nli\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 420000\r\n })\r\n test: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 9000\r\n })\r\n validation: Dataset({\r\n features: ['premise', 'hypothesis', 'label'],\r\n num_rows: 90000\r\n })\r\n})\r\n>>> newsph_nli[\"train\"][0]\r\n{'hypothesis': 'Ito ang dineklara ni Atty. Romulo Macalintal, abogado ni Robredo, kaugnay ng pagsisimula ng preliminary conference ngayong hapon sa Presidential Electoral Tribunal (PET).',\r\n 'label': 1,\r\n 'premise': '\"Hindi ko ugali ang mamulitika; mas gusto kong tahimik na magtrabaho. Pero sasabihin ko ito ngayon: ang tapang, lakas, at diskarte, hindi nadadaan sa mapanirang salita. 
Ang kailangan ng taumbayan ay tapang sa gawa,\" ayon kay Robredo sa inilabas nitong statement.'}\r\n>>> newsph_nli[\"test\"][0]\r\n{'hypothesis': '-- JAI (@JaiPaller) September 13, 2019',\r\n 'label': 1,\r\n 'premise': 'Pinag-iingat ng Konsulado ng Pilipinas sa Dubai ang publiko, partikular ang mga donor, laban sa mga scam na gumagamit ng mga charitable organization.'}\r\n```\r\n\r\nI don't have experience with open source pull requests, so I suggest that you reflect them in the source.\r\n\r\nThank you for reading :)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2261\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261","id":867088818,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyODIxNzQw","number":2261,"title":"Improve ReadInstruction logic and update docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T19:07:26Z","updated_at":"2021-05-17T18:24:44Z","closed_at":"2021-05-17T16:48:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2261","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2261.patch"},"body":"Improve ReadInstruction logic and docs.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2260\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260","id":866961697,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNzMwODYx","number":2260,"title":"GooAQ dataset 
added","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T09:26:48Z","updated_at":"2021-05-07T08:36:17Z","closed_at":"2021-05-07T08:36:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2260","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2260.patch"},"body":"@lhoestq here the dataset is stored with Git LFS. Should I add option for manual downloading of dataset using `git lfs pull` post repo cloning or can we accommodate this in the current `download_and_extract`?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2259\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259","id":866880092,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNjc2ODA0","number":2259,"title":"Add support for 
Split.ALL","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T01:45:42Z","updated_at":"2021-06-28T08:21:27Z","closed_at":"2021-06-28T08:21:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2259","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2259.patch"},"body":"The title says it all.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2258\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258","id":866870588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNjcxNTQy","number":2258,"title":"Fix incorrect update_metadata_with_features calls in 
ArrowDataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-25T00:48:38Z","updated_at":"2021-04-26T17:16:30Z","closed_at":"2021-04-26T16:54:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2258","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2258.patch"},"body":"Fixes bugs in the `unpdate_metadata_with_features` calls (caused by changes in #2151)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2257\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257","id":866755203,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyNTkwMDQw","number":2257,"title":"added metrics for 
CUAD","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-24T14:09:54Z","updated_at":"2021-04-29T09:53:38Z","closed_at":"2021-04-27T16:16:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2257","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2257.patch"},"body":"For now I've added F1, AUPR, Precision at 80% recall, and Precision at 90%. Last 3 metrics were reported in the [paper](https:\/\/arxiv.org\/pdf\/2103.06268.pdf). 
Please let me know if we require `exact_match` metric too here","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2256\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2256","id":866708609,"node_id":"MDU6SXNzdWU4NjY3MDg2MDk=","number":2256,"title":"Running `datase.map` with `num_proc > 1` uses a lot of memory","user":{"login":"roskoN","id":8143425,"node_id":"MDQ6VXNlcjgxNDM0MjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8143425?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/roskoN","html_url":"https:\/\/github.com\/roskoN","followers_url":"https:\/\/api.github.com\/users\/roskoN\/followers","following_url":"https:\/\/api.github.com\/users\/roskoN\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/roskoN\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/roskoN\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/roskoN\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/roskoN\/orgs","repos_url":"https:\/\/api.github.com\/users\/roskoN\/repos","events_url":"https:\/\/api.github.com\/users\/roskoN\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/roskoN\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-04-24T09:56:20Z","updated_at":"2021-04-26T17:12:15Z","closed_at":"2021-04-26T17:12:15Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\nRunning `datase.map` with `num_proc > 1` leads to a tremendous memory usage that requires swapping on disk and it becomes very slow.\r\n\r\n## Steps to reproduce the bug\r\n```python\r\nfrom datasets import load_dataset\r\n\r\ndstc8_datset = load_dataset(\"roskoN\/dstc8-reddit-corpus\", keep_in_memory=False)\r\n\r\n\r\ndef _prepare_sample(batch):\r\n return {\"input_ids\": list(), \"attention_mask\": list()}\r\n\r\n\r\nfor split_name, dataset_split in list(dstc8_datset.items()):\r\n print(f\"Processing {split_name}\")\r\n encoded_dataset_split = dataset_split.map(\r\n function=_prepare_sample,\r\n batched=True,\r\n num_proc=4,\r\n remove_columns=dataset_split.column_names,\r\n batch_size=10,\r\n writer_batch_size=10,\r\n keep_in_memory=False,\r\n )\r\n print(encoded_dataset_split)\r\n\r\n path = f\".\/data\/encoded_{split_name}\"\r\n\r\n encoded_dataset_split.save_to_disk(path)\r\n```\r\n\r\n## Expected results\r\nMemory usage should stay within reasonable boundaries.\r\n\r\n\r\n## Actual results\r\nThis is htop-output from running the provided script.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/8143425\/115954836-66954980-a4f3-11eb-8340-0153bdc3a475.png)\r\n\r\n## Versions\r\n```\r\n- Datasets: 1.6.0\r\n- Python: 3.8.8 (default, Apr 13 2021, 19:58:26)\r\n[GCC 7.3.0]\r\n- Platform: 
Linux-4.19.128-microsoft-standard-x86_64-with-glibc2.10\r\n```\r\nRunning on WSL2\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2255\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255","id":866242892,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyMTc0Njg4","number":2255,"title":"Task casting for text classification & question answering","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":15,"created_at":"2021-04-23T16:00:41Z","updated_at":"2021-05-18T13:31:36Z","closed_at":"2021-05-18T13:31:35Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2255","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2255.patch"},"body":"This PR implements task preparation for a given task, in the continuation of #2143 \r\n\r\nTask taxonomy follows \ud83e\udd17 Transformers's pipelines taxonomy: https:\/\/github.com\/huggingface\/transformers\/tree\/master\/src\/transformers\/pipelines\r\n\r\nEdit by @lewtun:\r\n\r\nThis PR implements support for the following tasks:\r\n\r\n* `text-classification`\r\n* `question-answering`\r\n\r\nThe intended usage is as follows:\r\n\r\n```python\r\n# Load a dataset with default column names \/ features\r\nds = load_dataset(\"dataset_name\")\r\n# Cast column names \/ features to schema. 
Casting is defined in the dataset's `DatasetInfo`\r\nds = ds.prepare_for_task(task=\"text-classification\")\r\n# Casting can also be realised during load\r\nds = load_dataset(\"dataset_name\", task=\"text-classification\")\r\n# We can also combine shared tasks across dataset concatenation\r\nds1 = load_dataset(\"dataset_name_1\", task=\"text-classification\")\r\nds2 = load_dataset(\"dataset_name_2\", task=\"text-classification\")\r\n# If the tasks have the same schema, so will `ds_concat`\r\nds_concat = concatenate_datasets([ds1, ds2])\r\n```\r\n\r\nNote that the current implementation assumes that `DatasetInfo.task_templates` has been pre-defined by the user \/ contributor when overriding the `MyDataset(GeneratorBasedBuilder)._info` function.\r\n\r\nAs pointed out by @SBrandeis, for evaluation we'll need a way to detect which datasets are already have a compatible schema so we don't have to edit hundreds of dataset scripts. One possibility is to check if the schema features are a subset of the dataset ones, e.g.\r\n\r\n```python\r\nsquad = load_dataset(\".\/datasets\/squad\", split=\"train\")\r\nqa = QuestionAnswering()\r\nschema = Features({**qa.input_schema, **qa.label_schema})\r\nassert all(item in squad.features.items() for item in schema.items())\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2254\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254","id":866169312,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyMTE1NDI0","number":2254,"title":"Update format, fingerprint and indices after add_item","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-23T14:31:49Z","updated_at":"2021-04-27T16:30:49Z","closed_at":"2021-04-27T16:30:48Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2254","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2254.patch"},"body":"Added fingerprint and format update 
wrappers + update the indices by adding the index of the newly added item in the table.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2253\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253","id":866034321,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIyMDA2Njg3","number":2253,"title":"Perform minor refactoring: use config","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-23T11:45:47Z","updated_at":"2021-05-27T09:12:45Z","closed_at":"2021-04-27T15:02:59Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2253","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2253.patch"},"body":"Perform minor refactoring related to `config`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2252\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2252","id":865870710,"node_id":"MDU6SXNzdWU4NjU4NzA3MTA=","number":2252,"title":"Slow dataloading with big datasets issue 
persists","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":30,"created_at":"2021-04-23T08:18:20Z","updated_at":"2021-07-23T08:28:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nI reported too slow data fetching when data is large(#2210) a couple of weeks ago, and @lhoestq referred me to the fix (#2122).\r\nHowever, the problem seems to persist. 
Here is the profiled results:\r\n\r\n\r\n1) Running with 60GB\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 517.96 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nmodel_backward \t| 0.26144 \t|100 \t| 26.144 \t| 5.0475 \t|\r\nmodel_forward \t| 0.11123 \t|100 \t| 11.123 \t| 2.1474 \t|\r\nget_train_batch \t| 0.097121 \t|100 \t| 9.7121 \t| 1.8751 \t|\r\n```\r\n\r\n\r\n3) Running with 600GB, datasets==1.6.0\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 4563.2 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nget_train_batch \t| 5.1279 \t|100 \t| 512.79 \t| 11.237 \t|\r\nmodel_backward \t| 4.8394 \t|100 \t| 483.94 \t| 10.605 \t|\r\nmodel_forward \t| 0.12162 \t|100 \t| 12.162 \t| 0.26652 \t|\r\n```\r\n\r\nI see that `get_train_batch` lags when data is large. Could this be related to different issues?\r\nI would be happy to provide necessary information to investigate.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2251\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2251","id":865848705,"node_id":"MDU6SXNzdWU4NjU4NDg3MDU=","number":2251,"title":"while running run_qa.py, ran into a value error","user":{"login":"nlee0212","id":44570724,"node_id":"MDQ6VXNlcjQ0NTcwNzI0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44570724?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nlee0212","html_url":"https:\/\/github.com\/nlee0212","followers_url":"https:\/\/api.github.com\/users\/nlee0212\/followers","following_url":"https:\/\/api.github.com\/users\/nlee0212\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nlee0212\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nlee0212\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nlee0212\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nlee0212\/orgs","repos_url":"https:\/\/api.github.com\/users\/nlee0212\/repos","events_url":"https:\/\/api.github.com\/users\/nlee0212\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nlee0212\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-23T07:51:03Z","updated_at":"2021-04-23T07:51:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"command:\r\n\r\npython3 run_qa.py --model_name_or_path hyunwoongko\/kobart --dataset_name squad_kor_v2 --do_train --do_eval 
--per_device_train_batch_size 8 --learning_rate 3e-5 --num_train_epochs 3 --max_seq_length 512 --doc_stride 128 --output_dir \/tmp\/debug_squad\/\r\n\r\nerror: \r\n\r\nValueError: External features info don't match the dataset:\r\nGot\r\n{'id': Value(dtype='string', id=None), 'title': Value(dtype='string', id=None), 'context': Value(dtype='string', id=None), 'question': Value(dtype='string', id=None), 'answer': {'text': Value(dtype='string', id=None), 'answer_start': Value(dtype='int32', id=None), 'html_answer_start': Value(dtype='int32', id=None)}, 'url': Value(dtype='string', id=None), 'raw_html': Value(dtype='string', id=None)}\r\nwith type\r\nstruct, context: string, id: string, question: string, raw_html: string, title: string, url: string>\r\n\r\nbut expected something like\r\n{'answer': {'answer_start': Value(dtype='int32', id=None), 'html_answer_start': Value(dtype='int32', id=None), 'text': Value(dtype='string', id=None)}, 'context': Value(dtype='string', id=None), 'id': Value(dtype='string', id=None), 'question': Value(dtype='string', id=None), 'raw_html': Value(dtype='string', id=None), 'title': Value(dtype='string', id=None), 'url': Value(dtype='string', id=None)}\r\nwith type\r\nstruct, context: string, id: string, question: string, raw_html: string, title: string, url: string>\r\n\r\nI didn't encounter this error 4 hours ago. any solutions for this kind of issue?\r\nlooks like gained dataset format refers to 'Data Fields', while expected refers to 'Data Instances'.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2250\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2250","id":865402449,"node_id":"MDU6SXNzdWU4NjU0MDI0NDk=","number":2250,"title":"some issue in loading local txt file as Dataset for 
run_mlm.py","user":{"login":"alighofrani95","id":14968123,"node_id":"MDQ6VXNlcjE0OTY4MTIz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/14968123?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alighofrani95","html_url":"https:\/\/github.com\/alighofrani95","followers_url":"https:\/\/api.github.com\/users\/alighofrani95\/followers","following_url":"https:\/\/api.github.com\/users\/alighofrani95\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alighofrani95\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alighofrani95\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alighofrani95\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alighofrani95\/orgs","repos_url":"https:\/\/api.github.com\/users\/alighofrani95\/repos","events_url":"https:\/\/api.github.com\/users\/alighofrani95\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alighofrani95\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-22T19:39:13Z","updated_at":"2021-04-22T21:11:40Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"![image](https:\/\/user-images.githubusercontent.com\/14968123\/115773877-18cef300-a3c6-11eb-8e58-a9cbfd1001ec.png)\r\n\r\nfirst of all, I tried to load 3 .txt files as a dataset (sure that the directory and permission is OK.), I face with the below error.\r\n\r\n> FileNotFoundError: [Errno 2] No such file or directory: 'c'\r\n\r\nby removing one of the training .txt files It's fixed and although if I put all file as training it's ok\r\n![image](https:\/\/user-images.githubusercontent.com\/14968123\/115774207-867b1f00-a3c6-11eb-953b-905cfb112d25.png)\r\n![image](https:\/\/user-images.githubusercontent.com\/14968123\/115774264-9b57b280-a3c6-11eb-9f36-7b109f0e5a31.png)\r\n\r\n\r\nafter this, my question is how could I use this defined Dataset for run_mlm.py for from scratch pretraining.\r\nby using --train_file path_to_train_file just can use one .txt , .csv or, .json file. 
I tried to set my defined Dataset as --dataset_name but the below issue occurs.\r\n\r\n\r\n> Traceback (most recent call last):\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py\", line 336, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py\", line 291, in cached_path\r\n use_auth_token=download_config.use_auth_token,\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py\", line 621, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/dataset\/dataset.py\r\n\r\n> During handling of the above exception, another exception occurred:\r\n\r\n> Traceback (most recent call last):\r\n File \"run_mlm.py\", line 486, in \r\n main()\r\n File \"run_mlm.py\", line 242, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name, cache_dir=model_args.cache_dir)\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py\", line 719, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py\", line 347, in prepare_module\r\n combined_path, github_file_path\r\nFileNotFoundError: Couldn't find file locally at dataset\/dataset.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.6.0\/datasets\/dataset\/dataset.py.\r\nThe file is also not present on the master branch on github.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2249\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249","id":865257826,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIxMzU1MzE3","number":2249,"title":"Allow downloading\/processing\/caching only specific 
splits","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":5,"closed_issues":2,"state":"open","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-08-04T17:03:52Z","due_on":"2021-08-21T07:00:00Z","closed_at":null},"comments":2,"created_at":"2021-04-22T17:51:44Z","updated_at":"2021-07-21T15:36:05Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2249","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2249.patch"},"body":"Allow downloading\/processing\/caching only specific splits without downloading\/processing\/caching the 
other splits.\r\n\r\nThis PR implements two steps to handle only specific splits:\r\n- it allows processing\/caching only specific splits into Arrow files\r\n- for some simple cases, it allows downloading only specific splits (which is more intricate as it depends on the user-defined method `_split_generators`)\r\n\r\nThis PR makes several assumptions:\r\n- `DownloadConfig` contains the configuration settings for downloading\r\n- the parameter `split` passed to `load_dataset` is just a parameter for loading (from cache), not for downloading","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2248\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248","id":864853447,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIxMDEyNzg5","number":2248,"title":"Implement Dataset to JSON","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/3","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3\/labels","id":6644287,"node_id":"MDk6TWlsZXN0b25lNjY0NDI4Nw==","number":3,"title":"1.7","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":3,"state":"closed","created_at":"2021-04-09T13:16:31Z","updated_at":"2021-05-31T16:20:53Z","due_on":"2021-05-14T07:00:00Z","closed_at":"2021-05-31T16:20:53Z"},"comments":0,"created_at":"2021-04-22T11:46:51Z","updated_at":"2021-04-27T15:29:21Z","closed_at":"2021-04-27T15:29:20Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2248","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2248.patch"},"body":"Implement `Dataset.to_json`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2247\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247","id":864817520,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIwOTgzNzY3","number":2247,"title":"Implement Dataset from 
Parquet","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":5,"closed_issues":2,"state":"open","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-08-04T17:03:52Z","due_on":"2021-08-21T07:00:00Z","closed_at":null},"comments":2,"created_at":"2021-04-22T11:01:38Z","updated_at":"2021-07-26T13:28:52Z","closed_at":"2021-07-26T13:28:51Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2247","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2247.patch"},"body":"Implement instantiation of Dataset from Parquet 
file.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2246\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246","id":864220031,"node_id":"MDExOlB1bGxSZXF1ZXN0NjIwNDg3OTUw","number":2246,"title":"Faster map w\/ input_columns & faster slicing w\/ Iterable keys","user":{"login":"norabelrose","id":39116809,"node_id":"MDQ6VXNlcjM5MTE2ODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39116809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/norabelrose","html_url":"https:\/\/github.com\/norabelrose","followers_url":"https:\/\/api.github.com\/users\/norabelrose\/followers","following_url":"https:\/\/api.github.com\/users\/norabelrose\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/norabelrose\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/norabelrose\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/norabelrose\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/norabelrose\/orgs","repos_url":"https:\/\/api.github.com\/users\/norabelrose\/repos","events_url":"https:\/\/api.github.com\/users\/norabelrose\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/norabelrose\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-21T19:49:07Z","updated_at":"2021-04-26T16:13:59Z","closed_at":"2021-04-26T16:13:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2246","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2246.patch"},"body":"@lhoestq Fixes #2193 \r\n\r\n- `map` now uses `with_format` to only load needed columns in memory when `input_columns` is set\r\n- Slicing datasets with Iterables of indices now uses a new `Table.fast_gather` method, implemented with `np.searchsorted`, to find the appropriate batch indices all at once. 
`pa.concat_tables` is no longer used for this; we just call `pa.Table.from_batches` with a list of all the batch slices.\r\n\r\nTogether these changes have sped up batched `map()` calls over subsets of columns quite considerably in my initial testing.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2245\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245","id":863191655,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5NjQzMjQ3","number":2245,"title":"Add `key` type and duplicates verification with hashing","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":17,"created_at":"2021-04-20T20:03:19Z","updated_at":"2021-05-10T18:04:37Z","closed_at":"2021-05-10T17:31:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2245","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2245.patch"},"body":"Closes #2230 \r\nThere is currently no verification for the data type and the uniqueness of the keys yielded by the `dataset_builder`.\r\nThis PR is currently a work in progress with the following goals:\r\n\r\n- [x] Adding `hash_salt` to `ArrowWriter` so that the keys belonging to different splits have different hash\r\n- [x] Add `key` arrtibute to `ArrowWriter.write()` for hashing\r\n- [x] Add a hashing class which takes an input key of certain type (`str`\/`int`\/anything convertible to string) and produces a 128-bit hash using `hashlib.md5`\r\n- [x] Creating a function giving a custom error message when non-unique keys are found \r\n **[This will take care of type-checking for keys]**\r\n- [x] Checking for duplicate keys in `writer.write()` for each batch\r\n\r\n[**NOTE**: This PR is currently concerned with `GeneratorBasedBuilder` only, for simplification. A subsequent PR will be made in future for `ArrowBasedBuilder`]\r\n\r\n@lhoestq Thank you for the feedback. 
It would be great to have your guidance on this!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2244\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244","id":863029946,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5NTAyODc0","number":2244,"title":"Set specific cache directories per test function call","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/7","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/7\/labels","id":6931350,"node_id":"MDk6TWlsZXN0b25lNjkzMTM1MA==","number":7,"title":"1.11","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":5,"closed_issues":2,"state":"open","created_at":"2021-07-09T05:49:00Z","updated_at":"2021-08-04T17:03:52Z","due_on":"2021-08-21T07:00:00Z","closed_at":null},"comments":4,"created_at":"2021-04-20T17:06:22Z","updated_at":"2021-07-21T15:36:04Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2244","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2244.patch"},"body":"Implement specific cache directories (datasets, metrics and modules) per test function call.\r\n\r\nCurrently, the cache directories are set within the temporary test directory, but they are shared across all test function calls.\r\n\r\nThis PR implements specific cache directories for each test function call, so that tests are atomic and there are no side effects.\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2243\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2243","id":862909389,"node_id":"MDU6SXNzdWU4NjI5MDkzODk=","number":2243,"title":"Map is slow and processes batches one after 
another","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-20T14:58:20Z","updated_at":"2021-05-03T17:54:33Z","closed_at":"2021-05-03T17:54:32Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nI have a somewhat unclear bug to me, where I can't figure out what the problem is. The code works as expected on a small subset of my dataset (2000 samples) on my local machine, but when I execute the same code with a larger dataset (1.4 million samples) this problem occurs. Thats why I can't give exact steps to reproduce, I'm sorry. \r\n\r\nI process a large dataset in a two step process. I first call map on a dataset I load from disk and create a new dataset from it. This works like expected and `map` uses all workers I started it with. Then I process the dataset created by the first step, again with `map`, which is really slow and starting only one or two process at a time. Number of processes is the same for both steps.\r\n\r\npseudo code:\r\n```python\r\nds = datasets.load_from_disk(\"path\")\r\nnew_dataset = ds.map(work, batched=True, ...) # fast uses all processes\r\nfinal_dataset = new_dataset.map(work2, batched=True, ...) # slow starts one process after another\r\n```\r\n\r\n## Expected results\r\nSecond stage should be as fast as the first stage.\r\n\r\n## Versions\r\nPaste the output of the following code:\r\n- Datasets: 1.5.0\r\n- Python: 3.8.8 (default, Feb 24 2021, 21:46:12)\r\n- Platform: Linux-5.4.0-60-generic-x86_64-with-glibc2.10 \r\n\r\nDo you guys have any idea? 
Thanks a lot!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2242\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2242","id":862870205,"node_id":"MDU6SXNzdWU4NjI4NzAyMDU=","number":2242,"title":"Link to datasets viwer on Quick Tour page returns \"502 Bad Gateway\"","user":{"login":"martavillegas","id":6735707,"node_id":"MDQ6VXNlcjY3MzU3MDc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6735707?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/martavillegas","html_url":"https:\/\/github.com\/martavillegas","followers_url":"https:\/\/api.github.com\/users\/martavillegas\/followers","following_url":"https:\/\/api.github.com\/users\/martavillegas\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/martavillegas\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/martavillegas\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/martavillegas\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/martavillegas\/orgs","repos_url":"https:\/\/api.github.com\/users\/martavillegas\/repos","events_url":"https:\/\/api.github.com\/users\/martavillegas\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/martavillegas\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-20T14:19:51Z","updated_at":"2021-04-20T15:02:45Z","closed_at":"2021-04-20T15:02:45Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Link to datasets viwer (https:\/\/huggingface.co\/datasets\/viewer\/) on Quick Tour page (https:\/\/huggingface.co\/docs\/datasets\/quicktour.html) returns \"502 Bad Gateway\"\r\n\r\nThe same error with https:\/\/huggingface.co\/datasets\/viewer\/?dataset=glue&config=mrpc ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2241\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241","id":862696460,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5MjI0MzIw","number":2241,"title":"Add SLR32 to 
OpenSLR","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-20T11:02:45Z","updated_at":"2021-04-23T16:21:24Z","closed_at":"2021-04-23T15:36:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2241","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2241.patch"},"body":"I would like to add SLR32 to OpenSLR. It contains four South African languages: Afrikaans, Sesotho, Setswana and isiXhosa","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2240\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240","id":862537856,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE5MDkyODc5","number":2240,"title":"Clarify how to load 
wikihow","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-20T08:02:58Z","updated_at":"2021-04-21T09:54:57Z","closed_at":"2021-04-21T09:54:57Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2240","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2240.patch"},"body":"Explain clearer how to load the dataset in the manual download instructions.\r\n\r\nEn relation with #2239.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2239\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2239","id":861904306,"node_id":"MDU6SXNzdWU4NjE5MDQzMDY=","number":2239,"title":"Error loading wikihow 
dataset","user":{"login":"odellus","id":4686956,"node_id":"MDQ6VXNlcjQ2ODY5NTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4686956?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/odellus","html_url":"https:\/\/github.com\/odellus","followers_url":"https:\/\/api.github.com\/users\/odellus\/followers","following_url":"https:\/\/api.github.com\/users\/odellus\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/odellus\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/odellus\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/odellus\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/odellus\/orgs","repos_url":"https:\/\/api.github.com\/users\/odellus\/repos","events_url":"https:\/\/api.github.com\/users\/odellus\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/odellus\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-19T21:02:31Z","updated_at":"2021-04-20T16:33:11Z","closed_at":"2021-04-20T16:33:11Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Describe the bug\r\n\r\nWhen attempting to load wikihow into a dataset with\r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikihow', data_dir='.\/wikihow')\r\n```\r\nI get the message:\r\n```\r\nAttributeError: 'BuilderConfig' object has no attribute 'filename'\r\n```\r\nat the end of a [full stack trace](https:\/\/gist.github.com\/odellus\/602c3b2de52f541d353b1022f320ffc2).\r\n\r\n## Steps to reproduce the bug\r\n\r\nI have followed the instructions for creating a wikihow dataset. The [wikihow dataset site](https:\/\/huggingface.co\/datasets\/wikihow) says to use \r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikihow')\r\n```\r\nto load the dataset. I do so and I get the message\r\n```\r\nAssertionError: The dataset wikihow with config all requires manual data.\r\n Please follow the manual download instructions: You need to manually download two wikihow files. An overview of which files to download can be seen at https:\/\/github.com\/mahnazkoupaee\/WikiHow-Dataset.\r\n You need to download the following two files manually:\r\n 1) https:\/\/ucsb.app.box.com\/s\/ap23l8gafpezf4tq3wapr6u8241zz358 and save the file under \/wikihowAll.csv\r\n 2) https:\/\/ucsb.app.box.com\/s\/7yq601ijl1lzvlfu4rjdbbxforzd2oag and save the file under \/wikihowSep.csv\r\n\r\n The can e.g. 
be \"~\/manual_wikihow_data\".\r\n\r\n Wikihow can then be loaded using the following command `datasets.load_dataset(\"wikihow\", data_dir=\"\")`.\r\n .\r\n Manual data can be loaded with `datasets.load_dataset(wikihow, data_dir='')\r\n```\r\n\r\nSo I create a directory `.\/wikihow` and download `wikihowAll.csv` and `wikihowSep.csv` into the new directory.\r\n\r\nThen I run \r\n```python\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wikihow', data_dir='.\/wikihow')\r\n```\r\n\r\nthat's when I get the [stack trace](https:\/\/gist.github.com\/odellus\/602c3b2de52f541d353b1022f320ffc2)\r\n\r\n## Expected results\r\nI expected it to load the downloaded files into a dataset.\r\n\r\n## Actual results\r\n```python\r\nUsing custom data configuration default-data_dir=.%2Fwikihow\r\nDownloading and preparing dataset wikihow\/default (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/azureuser\/.cache\/huggingface\/datasets\/wikihow\/default-data_dir=.%2Fwikihow\/0.0.0\/58f42f8f0e4d459811a0f69aaab35870093830ccd58006769e7e1eb3e0e686c2... ---------------------------------------------------------------------------\r\nAttributeError\r\nTraceback (most recent call last)\r\n in \r\n----> 1 dataset = load_dataset('wikihow',data_dir='.\/wikihow')\r\n~\/.local\/lib\/python3.6\/site-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n745 try_from_hf_gcs=try_from_hf_gcs,\r\n746 base_path=base_path,--> \r\n747 use_auth_token=use_auth_token,\r\n748 )\r\n749 \r\n~\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n577 if not downloaded_from_gcs:\r\n578 self._download_and_prepare( -->\r\n579 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs \r\n580 ) \r\n581 # Sync info\r\n~\/.local\/lib\/python3.6\/site-packages\/datasets\/builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n632 split_dict = SplitDict(dataset_name=self.name)\r\n633 split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs) -->\r\n634 split_generators = self._split_generators(dl_manager, **split_generators_kwargs) \r\n635 \r\n636 # Checksums verification\r\n~\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wikihow\/58f42f8f0e4d459811a0f69aaab35870093830ccd58006769e7e1eb3e0e686c2\/wikihow.py in _split_generators(self, dl_manager)\r\n132\r\n133 path_to_manual_file = os.path.join(\r\n--> 134 os.path.abspath(os.path.expanduser(dl_manager.manual_dir)), self.config.filename \r\n135 ) \r\n136\r\nAttributeError: 'BuilderConfig' object has no attribute 'filename'\r\n```\r\n## Versions\r\nPaste the output of the following code:\r\n```python\r\nimport datasets\r\nimport sys\r\nimport platform\r\n\r\nprint(f\"\"\"\r\n- Datasets: {datasets.__version__}\r\n- Python: {sys.version}\r\n- Platform: {platform.platform()}\r\n\"\"\")\r\n```\r\n```\r\n- Datasets: 1.5.0\r\n- Python: 3.6.9 (default, Jan 26 2021, 15:33:00) [GCC 8.4.0]\r\n- Platform: Linux-5.4.0-1046-azure-x86_64-with-Ubuntu-18.04-bionic\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2238\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238","id":861518291,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE4MTY5NzM5","number":2238,"title":"NLU evaluation data","user":{"login":"dkajtoch","id":32985207,"node_id":"MDQ6VXNlcjMyOTg1MjA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32985207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dkajtoch","html_url":"https:\/\/github.com\/dkajtoch","followers_url":"https:\/\/api.github.com\/users\/dkajtoch\/followers","following_url":"https:\/\/api.github.com\/users\/dkajtoch\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dkajtoch\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dkajtoch\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dkajtoch\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dkajtoch\/orgs","repos_url":"https:\/\/api.github.com\/users\/dkajtoch\/repos","events_url":"https:\/\/api.github.com\/users\/dkajtoch\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dkajtoch\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-19T16:47:20Z","updated_at":"2021-04-23T15:32:05Z","closed_at":"2021-04-23T15:32:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2238","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2238.patch"},"body":"New intent classification dataset from https:\/\/github.com\/xliuhw\/NLU-Evaluation-Data","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2237\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2237","id":861427439,"node_id":"MDU6SXNzdWU4NjE0Mjc0Mzk=","number":2237,"title":"Update Dataset.dataset_size after transformed with 
map","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-19T15:19:38Z","updated_at":"2021-04-20T14:22:05Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"After loading a dataset, if we transform it by using `.map` its `dataset_size` attirbute is not updated.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2236\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2236","id":861388145,"node_id":"MDU6SXNzdWU4NjEzODgxNDU=","number":2236,"title":"Request to add StrategyQA dataset","user":{"login":"sarahwie","id":8027676,"node_id":"MDQ6VXNlcjgwMjc2NzY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8027676?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sarahwie","html_url":"https:\/\/github.com\/sarahwie","followers_url":"https:\/\/api.github.com\/users\/sarahwie\/followers","following_url":"https:\/\/api.github.com\/users\/sarahwie\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sarahwie\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sarahwie\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sarahwie\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sarahwie\/orgs","repos_url":"https:\/\/api.github.com\/users\/sarahwie\/repos","events_url":"https:\/\/api.github.com\/users\/sarahwie\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sarahwie\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-19T14:46:26Z","updated_at":"2021-04-19T14:46:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Request to add StrategyQA dataset\r\n- **Name:** StrategyQA\r\n- **Description:** open-domain QA [(project page)](https:\/\/allenai.org\/data\/strategyqa)\r\n- **Paper:** [url](https:\/\/arxiv.org\/pdf\/2101.02235.pdf)\r\n- **Data:** [here](https:\/\/allenai.org\/data\/strategyqa)\r\n- **Motivation:** uniquely-formulated dataset that also includes a question-decomposition breakdown and associated Wikipedia annotations for each step. Good for multi-hop reasoning modeling.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2235\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235","id":861040716,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3Nzc0NDUw","number":2235,"title":"Update README.md","user":{"login":"PierreColombo","id":22492839,"node_id":"MDQ6VXNlcjIyNDkyODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22492839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PierreColombo","html_url":"https:\/\/github.com\/PierreColombo","followers_url":"https:\/\/api.github.com\/users\/PierreColombo\/followers","following_url":"https:\/\/api.github.com\/users\/PierreColombo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PierreColombo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PierreColombo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PierreColombo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PierreColombo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PierreColombo\/repos","events_url":"https:\/\/api.github.com\/users\/PierreColombo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PierreColombo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-19T08:21:02Z","updated_at":"2021-04-19T12:49:19Z","closed_at":"2021-04-19T12:49:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2235","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2235.patch"},"body":"Adding relevant citations (paper accepted at AAAI 2020 & EMNLP 2020) to the benchmark","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2234\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234","id":860442246,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3MzI4NDU3","number":2234,"title":"Fix bash snippet formatting in ADD_NEW_DATASET.md","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-17T16:01:08Z","updated_at":"2021-04-19T10:57:31Z","closed_at":"2021-04-19T07:51:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2234","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2234.patch"},"body":"This PR indents the paragraphs around the bash snippets in ADD_NEW_DATASET.md to fix formatting.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2233\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233","id":860097084,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3MDYwMTkw","number":2233,"title":"Fix `xnli` dataset tuple 
key","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-16T19:12:42Z","updated_at":"2021-04-19T08:56:42Z","closed_at":"2021-04-19T08:56:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2233","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2233.patch"},"body":"Closes #2229 \r\nThe `xnli` dataset yields a tuple key in case of `ar` which is inconsistant with the acceptable key types (str\/int).\r\nThe key was thus ported to `str` keeping the original information intact.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2232\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232","id":860075931,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE3MDQyNTI4","number":2232,"title":"Start filling GLUE dataset 
card","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-16T18:37:37Z","updated_at":"2021-04-21T09:33:09Z","closed_at":"2021-04-21T09:33:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2232","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2232.patch"},"body":"The dataset card was pretty much empty.\r\n\r\nI added the descriptions (mainly from TFDS since the script is the same), and I also added the tasks tags as well as examples for a subset of the tasks.\r\n\r\ncc @sgugger ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2231\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231","id":859850488,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE2ODYyNTEx","number":2231,"title":"Fix map when removing columns on a formatted 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-16T14:08:55Z","updated_at":"2021-04-16T15:10:05Z","closed_at":"2021-04-16T15:10:04Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2231","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2231.patch"},"body":"This should fix issue #2226\r\n\r\nThe `remove_columns` argument was ignored on formatted datasets","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2230\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2230","id":859817159,"node_id":"MDU6SXNzdWU4NTk4MTcxNTk=","number":2230,"title":"Keys yielded while generating dataset are not being 
checked","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-04-16T13:29:47Z","updated_at":"2021-05-10T17:31:21Z","closed_at":"2021-05-10T17:31:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"The keys used in the dataset generation script to ensure the same order is generated on every user's end should be checked for their types (i.e either `str` or `int`) as well as whether they are unique or not.\r\nCurrently, the keys are not being checked for any of these, as evident from `xnli' dataset generation:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/datasets\/xnli\/xnli.py#L196\r\nEven after having a tuple as key, the dataset is generated without any warning.\r\n\r\nAlso, as tested in the case of `anli` dataset (I tweeked the dataset script to use `1` as a key for every example):\r\n```\r\n>>> import datasets\r\n>>> nik = datasets.load_dataset('anli')\r\nDownloading and preparing dataset anli\/plain_text (download: 17.76 MiB, generated: 73.55 MiB, post-processed: Unknown size, total: 91.31 MiB) to C:\\Users\\nikhil\\.cache\\huggingface\\datasets\\anli\\plain_text\\0.1.0\\43fa2c99c10bf8478f1fa0860f7b122c6b277c4c41306255b7641257cf4e3299...\r\n0 examples [00:00, ? examples\/s]1 {'uid': '0fd0abfb-659e-4453-b196-c3a64d2d8267', 'premise': 'The Parma trolleybus system (Italian: \"Rete filoviaria di Parma\" ) forms part of the public transport network of the city and \"comune\" of Parma, in the region of Emilia-Romagna, northern Italy. In operation since 1953, the system presently comprises four urban routes.', 'hypothesis': 'The trolleybus system has over 2 urban routes', 'label': 'entailment', 'reason': ''}\r\n2021-04-16 12:38:14.483968: I tensorflow\/stream_executor\/platform\/default\/dso_loader.cc:49] Successfully opened dynamic library cudart64_110.dll\r\n1 examples [00:01, 1.87s\/ examples]1 {'uid': '7ed72ff4-40b7-4f8a-b1b9-6c612aa62c84', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". 
She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': \"Sharron Macready was a popular character through the 1980's.\", 'label': 'neutral', 'reason': ''}\r\n1 {'uid': '5d2930a3-62ac-485d-94d7-4e36cbbcd7b5', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': \"Bastedo didn't keep any pets because of her views on animal rights.\", 'label': 'neutral', 'reason': ''}\r\n1 {'uid': '324db753-ddc9-4a85-a825-f09e2e5aebdd', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': 'Alexandra Bastedo was named by her mother.', 'label': 'neutral', 'reason': ''}\r\n1 {'uid': '4874f429-da0e-406a-90c7-22240ff3ddf8', 'premise': 'Alexandra Lendon Bastedo (9 March 1946 \u2013 12 January 2014) was a British actress, best known for her role as secret agent Sharron Macready in the 1968 British espionage\/science fiction adventure series \"The Champions\". She has been cited as a sex symbol of the 1960s and 1970s. Bastedo was a vegetarian and animal welfare advocate.', 'hypothesis': 'Bastedo cared for all the animals that inhabit the earth.', 'label': 'neutral', 'reason': ''}\r\n```\r\nHere also, the dataset was generated successfuly even hough it had same keys without any warning.\r\n\r\nThe reason appears to stem from here:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/src\/datasets\/builder.py#L988\r\nHere, although it has access to every key, but it is not being checked and the example is written directly:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/src\/datasets\/builder.py#L992\r\n\r\nI would like to take this issue if you allow me. 
Thank You!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2229\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2229","id":859810602,"node_id":"MDU6SXNzdWU4NTk4MTA2MDI=","number":2229,"title":"`xnli` dataset creating a tuple key while yielding instead of `str` or `int`","user":{"login":"NikhilBartwal","id":42388668,"node_id":"MDQ6VXNlcjQyMzg4NjY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42388668?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NikhilBartwal","html_url":"https:\/\/github.com\/NikhilBartwal","followers_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/followers","following_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/orgs","repos_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/repos","events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NikhilBartwal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-16T13:21:53Z","updated_at":"2021-04-19T08:56:42Z","closed_at":"2021-04-19T08:56:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"When using `ds = datasets.load_dataset('xnli', 'ar')`, the dataset generation script uses the following section of code in the egging, which yields a tuple key instead of the specified `str` or `int` key:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/56346791aed417306d054d89bd693d6b7eab17f7\/datasets\/xnli\/xnli.py#L196\r\n\r\nSince, community datasets in Tensorflow Datasets also use HF datasets, this causes a Tuple key error while loading HF's `xnli` dataset. 
\r\nI'm up for sending a fix for this, I think we can simply use `file_idx + \"_\" + row_idx` as a unique key instead of a tuple.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2228\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228","id":859795563,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE2ODE2MTQz","number":2228,"title":"[WIP] Add ArrayXD support for fixed size list.","user":{"login":"jblemoine","id":22685854,"node_id":"MDQ6VXNlcjIyNjg1ODU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22685854?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jblemoine","html_url":"https:\/\/github.com\/jblemoine","followers_url":"https:\/\/api.github.com\/users\/jblemoine\/followers","following_url":"https:\/\/api.github.com\/users\/jblemoine\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jblemoine\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jblemoine\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jblemoine\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jblemoine\/orgs","repos_url":"https:\/\/api.github.com\/users\/jblemoine\/repos","events_url":"https:\/\/api.github.com\/users\/jblemoine\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jblemoine\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-16T13:04:08Z","updated_at":"2021-04-19T13:02:18Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2228","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2228.patch"},"body":"Add support for fixed size list for ArrayXD when shape is known . See https:\/\/github.com\/huggingface\/datasets\/issues\/2146\r\nSince offset are not stored anymore, the file size is now roughly equal to the actual data size. 
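The key-handling reports above (#2229, #2230, #2233) all come down to the same contract: `_generate_examples` should yield a unique `str` or `int` key for every example. Below is a minimal sketch of that convention on the generator side, assuming a hypothetical line-oriented loader with a `filepaths` argument; it is illustrative only, not the patch merged in #2233.

```python
def _generate_examples(filepaths):
    # Yield (key, example) pairs with string keys and an explicit uniqueness guard,
    # following the convention proposed in #2229 (string instead of tuple keys)
    # and the checks proposed in #2230 (key type and uniqueness).
    seen_keys = set()
    for file_idx, filepath in enumerate(filepaths):
        with open(filepath, encoding="utf-8") as f:
            for row_idx, line in enumerate(f):
                key = f"{file_idx}_{row_idx}"        # str key instead of a (file_idx, row_idx) tuple
                assert isinstance(key, (str, int)), f"invalid key type: {type(key)}"
                assert key not in seen_keys, f"duplicate key: {key}"
                seen_keys.add(key)
                yield key, {"text": line.rstrip("\n")}
```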
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2227\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227","id":859771526,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE2Nzk1NjMx","number":2227,"title":"Use update_metadata_with_features decorator in class_encode_column method","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-16T12:31:41Z","updated_at":"2021-04-16T13:49:40Z","closed_at":"2021-04-16T13:49:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2227","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2227.patch"},"body":"Following @mariosasko 's comment","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2226\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2226","id":859720302,"node_id":"MDU6SXNzdWU4NTk3MjAzMDI=","number":2226,"title":"Batched map fails when removing all 
columns","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-04-16T11:17:01Z","updated_at":"2021-04-16T15:10:41Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi @lhoestq ,\r\n\r\nI'm hijacking this issue, because I'm currently trying to do the approach you recommend:\r\n\r\n> Currently the optimal setup for single-column computations is 
probably to do something like\r\n> \r\n> ```python\r\n> result = dataset.map(f, input_columns=\"my_col\", remove_columns=dataset.column_names)\r\n> ```\r\n\r\nHere is my code: (see edit, in which I added a simplified version\r\n\r\n```\r\nThis is the error:\r\n```bash\r\npyarrow.lib.ArrowInvalid: Column 1 named tokens expected length 8964 but got length 1000\r\n```\r\nI wonder why this error occurs, when I delete every column? Can you give me a hint?\r\n\r\n### Edit:\r\nI preprocessed my dataset before (using map with the features argument) and saved it to disk. May this be part of the error? I can iterate over the\r\ncomplete dataset and print every sample before calling map. There seems to be no other problem with the dataset.\r\n\r\nI tried to simplify the code that crashes:\r\n\r\n```python\r\n# works\r\nlog.debug(dataset.column_names)\r\nlog.debug(dataset)\r\nfor i, sample in enumerate(dataset):\r\n log.debug(i, sample)\r\n\r\n# crashes\r\ncounted_dataset = dataset.map(\r\n lambda x: {\"a\": list(range(20))},\r\n input_columns=column,\r\n remove_columns=dataset.column_names,\r\n load_from_cache_file=False,\r\n num_proc=num_workers,\r\n batched=True,\r\n)\r\n```\r\n\r\n```\r\npyarrow.lib.ArrowInvalid: Column 1 named tokens expected length 20 but got length 1000\r\n```\r\n\r\nEdit2: \r\n\r\nMay this be a problem with a schema I set when preprocessing the dataset before? I tried to add the `features` argument to the function and then I get a new error:\r\n\r\n```python\r\n# crashes\r\ncounted_dataset = dataset.map(\r\n lambda x: {\"a\": list(range(20))},\r\n input_columns=column,\r\n remove_columns=dataset.column_names,\r\n load_from_cache_file=False,\r\n num_proc=num_workers,\r\n batched=True,\r\n features=datasets.Features(\r\n {\r\n \"a\": datasets.Sequence(datasets.Value(\"int32\"))\r\n }\r\n )\r\n)\r\n```\r\n\r\n```\r\n File \"env\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1704, in _map_single\r\n writer.write_batch(batch)\r\n File \"env\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 312, in write_batch\r\n col_type = schema.field(col).type if schema is not None else None\r\n File \"pyarrow\/types.pxi\", line 1341, in pyarrow.lib.Schema.field\r\nKeyError: 'Column tokens does not exist in schema'\r\n```\r\n\r\n_Originally posted by @villmow in https:\/\/github.com\/huggingface\/datasets\/issues\/2193#issuecomment-820230874_","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2225\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225","id":858469561,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1NzAzMTY4","number":2225,"title":"fixed one instance of 'train' to 
'test'","user":{"login":"alexwdong","id":46733535,"node_id":"MDQ6VXNlcjQ2NzMzNTM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46733535?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexwdong","html_url":"https:\/\/github.com\/alexwdong","followers_url":"https:\/\/api.github.com\/users\/alexwdong\/followers","following_url":"https:\/\/api.github.com\/users\/alexwdong\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexwdong\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexwdong\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexwdong\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexwdong\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexwdong\/repos","events_url":"https:\/\/api.github.com\/users\/alexwdong\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexwdong\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-15T04:26:40Z","updated_at":"2021-04-15T22:09:50Z","closed_at":"2021-04-15T21:19:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2225","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2225.patch"},"body":"I believe this should be 'test' instead of 'train'","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2224\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2224","id":857983361,"node_id":"MDU6SXNzdWU4NTc5ODMzNjE=","number":2224,"title":"Raise error if Windows max path length is not 
disabled","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-14T14:57:20Z","updated_at":"2021-04-14T14:59:13Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"On startup, raise an error if Windows max path length is not disabled; ask the user to disable it.\r\n\r\nLinked to discussion in #2220.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2223\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223","id":857870800,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MjE4MDIz","number":2223,"title":"Set test cache 
config","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-14T12:55:24Z","updated_at":"2021-04-15T19:11:25Z","closed_at":"2021-04-15T19:11:25Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2223","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2223.patch"},"body":"Currently, running the tests populates the default cache directory `\"~\/.cache\"`.\r\n\r\nThis PR monkey-patches the config to set the cache directory within the temporary test directory, avoiding side effects.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2222\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222","id":857847231,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MTk5MTM5","number":2222,"title":"Fix too long WindowsFileLock 
name","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892913,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEz","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/wontfix","name":"wontfix","color":"ffffff","default":true,"description":"This will not be worked on"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-14T12:26:52Z","updated_at":"2021-04-14T15:00:25Z","closed_at":"2021-04-14T14:46:19Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2222","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2222.patch"},"body":"Fix WindowsFileLock name longer than allowed MAX_PATH by shortening the basename.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2221\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221","id":857833770,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MTg4MTE5","number":2221,"title":"Add SLR70 - SLR80 and SLR86 to OpenSLR 
dataset","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-14T12:09:18Z","updated_at":"2021-04-14T13:50:19Z","closed_at":"2021-04-14T13:50:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2221","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2221.patch"},"body":"I would like to add SLR70, SLR71, SLR72, SLR73, SLR74, SLR75, SLR76, SLR77, SLR78, SLR79, SLR80 and SLR86 to OpenSLR dataset. The languages are:\r\nNigerian English, Chilean Spanish, Columbian Spanish, Peruvian Spanish, Puerto Rico Spanish, Venezuelan Spanish, Basque, Galician, Gujarati and Kannada.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2220\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220","id":857774626,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE1MTM4NDQz","number":2220,"title":"Fix infinite loop in 
WindowsFileLock","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892913,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEz","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/wontfix","name":"wontfix","color":"ffffff","default":true,"description":"This will not be worked on"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-14T10:49:58Z","updated_at":"2021-04-14T14:59:50Z","closed_at":"2021-04-14T14:59:34Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2220","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2220.patch"},"body":"Raise exception to avoid infinite loop.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2219\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219","id":857321242,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0NzYxMzA3","number":2219,"title":"Added CUAD 
dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-13T21:05:03Z","updated_at":"2021-04-24T14:25:51Z","closed_at":"2021-04-16T08:50:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2219","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2219.patch"},"body":"Dataset link : https:\/\/github.com\/TheAtticusProject\/cuad\/\r\n\r\nWorking on README.md currently.\r\n\r\nCloses #2084 and [#1](https:\/\/github.com\/TheAtticusProject\/cuad\/issues\/1). 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2218\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2218","id":857238435,"node_id":"MDU6SXNzdWU4NTcyMzg0MzU=","number":2218,"title":"Duplicates in the LAMA dataset","user":{"login":"amarasovic","id":7276193,"node_id":"MDQ6VXNlcjcyNzYxOTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7276193?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/amarasovic","html_url":"https:\/\/github.com\/amarasovic","followers_url":"https:\/\/api.github.com\/users\/amarasovic\/followers","following_url":"https:\/\/api.github.com\/users\/amarasovic\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/amarasovic\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/amarasovic\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/amarasovic\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/amarasovic\/orgs","repos_url":"https:\/\/api.github.com\/users\/amarasovic\/repos","events_url":"https:\/\/api.github.com\/users\/amarasovic\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/amarasovic\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-13T18:59:49Z","updated_at":"2021-04-14T21:42:27Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I observed duplicates in the LAMA probing dataset, see a minimal code below. 
\r\n\r\n```\r\n>>> import datasets\r\n>>> dataset = datasets.load_dataset('lama')\r\nNo config specified, defaulting to: lama\/trex\r\nReusing dataset lama (\/home\/anam\/.cache\/huggingface\/datasets\/lama\/trex\/1.1.0\/97deffae13eca0a18e77dfb3960bb31741e973586f5c1fe1ec0d6b5eece7bddc)\r\n>>> train_dataset = dataset['train']\r\n>>> train_dataset[0]\r\n{'description': 'language or languages a person has learned from early childhood', 'label': 'native language', 'masked_sentence': 'Louis Jules Trochu ([lwi \u0292yl t\u0281\u0254\u0283y]; 12 March 1815 \u2013 7 October 1896) was a [MASK] military leader and politician.', 'obj_label': 'French', 'obj_surface': 'French', 'obj_uri': 'Q150', 'predicate_id': 'P103', 'sub_label': 'Louis Jules Trochu', 'sub_surface': 'Louis Jules Trochu', 'sub_uri': 'Q441235', 'template': 'The native language of [X] is [Y] .', 'template_negated': '[X] is not owned by [Y] .', 'type': 'N-1', 'uuid': '40b2ed1c-0961-482e-844e-32596b6117c8'}\r\n>>> train_dataset[1]\r\n{'description': 'language or languages a person has learned from early childhood', 'label': 'native language', 'masked_sentence': 'Louis Jules Trochu ([lwi \u0292yl t\u0281\u0254\u0283y]; 12 March 1815 \u2013 7 October 1896) was a [MASK] military leader and politician.', 'obj_label': 'French', 'obj_surface': 'French', 'obj_uri': 'Q150', 'predicate_id': 'P103', 'sub_label': 'Louis Jules Trochu', 'sub_surface': 'Louis Jules Trochu', 'sub_uri': 'Q441235', 'template': 'The native language of [X] is [Y] .', 'template_negated': '[X] is not owned by [Y] .', 'type': 'N-1', 'uuid': '40b2ed1c-0961-482e-844e-32596b6117c8'}\r\n```\r\n\r\nI checked the original data available at https:\/\/dl.fbaipublicfiles.com\/LAMA\/data.zip. This particular duplicated comes from:\r\n```\r\n{\"uuid\": \"40b2ed1c-0961-482e-844e-32596b6117c8\", \"obj_uri\": \"Q150\", \"obj_label\": \"French\", \"sub_uri\": \"Q441235\", \"sub_label\": \"Louis Jules Trochu\", \"predicate_id\": \"P103\", \"evidences\": [{\"sub_surface\": \"Louis Jules Trochu\", \"obj_surface\": \"French\", \"masked_sentence\": \"Louis Jules Trochu ([lwi \\u0292yl t\\u0281\\u0254\\u0283y]; 12 March 1815 \\u2013 7 October 1896) was a [MASK] military leader and politician.\"}, {\"sub_surface\": \"Louis Jules Trochu\", \"obj_surface\": \"French\", \"masked_sentence\": \"Louis Jules Trochu ([lwi \\u0292yl t\\u0281\\u0254\\u0283y]; 12 March 1815 \\u2013 7 October 1896) was a [MASK] military leader and politician.\"}]}\r\n``` \r\n\r\nWhat is the best way to deal with these duplicates if I want to use `datasets` to probe with LAMA? 
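Until the loader deduplicates these repeated evidences itself, one client-side workaround is to drop repeated rows with `Dataset.filter`; the duplicated examples above share the same `uuid` and `masked_sentence`, so that pair can serve as the key. A minimal sketch, assuming the `trex` config and the default single-process filter (the shared `seen` set would not survive `num_proc > 1`):

```python
from datasets import load_dataset

lama = load_dataset("lama", "trex", split="train")

seen = set()

def keep_first_occurrence(example):
    # Drop any row whose (uuid, masked_sentence) pair has already been kept.
    key = (example["uuid"], example["masked_sentence"])
    if key in seen:
        return False
    seen.add(key)
    return True

deduplicated = lama.filter(keep_first_occurrence)
print(len(lama), "->", len(deduplicated))
```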
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2217\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217","id":857011314,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0NTAxNjIz","number":2217,"title":"Revert breaking change in cache_files property","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-13T14:20:04Z","updated_at":"2021-04-14T14:24:24Z","closed_at":"2021-04-14T14:24:23Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2217","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2217.patch"},"body":"#2025 changed the format of `Dataset.cache_files`.\r\nBefore it was formatted like\r\n```python\r\n[{\"filename\": \"path\/to\/file.arrow\", \"start\": 0, \"end\": 1337}]\r\n```\r\nand it was changed to\r\n```python\r\n[\"path\/to\/file.arrow\"]\r\n```\r\nsince there's no start\/end offsets available anymore.\r\n\r\nTo make this less breaking, I'm setting the format back to a list of dicts:\r\n```python\r\n[{\"filename\": \"path\/to\/file.arrow\"}]\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2216\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216","id":856955534,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0NDU0MjE1","number":2216,"title":"added real label for glue\/mrpc to test 
set","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-13T13:20:20Z","updated_at":"2021-04-13T13:53:20Z","closed_at":"2021-04-13T13:53:19Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2216","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2216.patch"},"body":"Added real label to `glue.py` `mrpc` task for test split.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2215\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215","id":856716791,"node_id":"MDExOlB1bGxSZXF1ZXN0NjE0MjUyNTEy","number":2215,"title":"Add datasets SLR35 and SLR36 to OpenSLR 
","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-13T08:24:07Z","updated_at":"2021-04-13T14:05:14Z","closed_at":"2021-04-13T14:05:14Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2215","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2215.patch"},"body":"I would like to add [SLR35](https:\/\/openslr.org\/35\/) (18GB) and [SLR36](https:\/\/openslr.org\/36\/) (22GB) which are Large Javanese and Sundanese ASR training data set collected by Google in collaboration with Reykjavik University and Universitas Gadjah Mada in Indonesia.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2214\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2214","id":856333657,"node_id":"MDU6SXNzdWU4NTYzMzM2NTc=","number":2214,"title":"load_metric error: module 'datasets.utils.file_utils' has no attribute 
'add_start_docstrings'","user":{"login":"nsaphra","id":414788,"node_id":"MDQ6VXNlcjQxNDc4OA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/414788?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nsaphra","html_url":"https:\/\/github.com\/nsaphra","followers_url":"https:\/\/api.github.com\/users\/nsaphra\/followers","following_url":"https:\/\/api.github.com\/users\/nsaphra\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nsaphra\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nsaphra\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nsaphra\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nsaphra\/orgs","repos_url":"https:\/\/api.github.com\/users\/nsaphra\/repos","events_url":"https:\/\/api.github.com\/users\/nsaphra\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nsaphra\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-12T20:26:01Z","updated_at":"2021-04-23T15:20:02Z","closed_at":"2021-04-23T15:20:02Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I'm having the same problem as [Notebooks issue 10](https:\/\/github.com\/huggingface\/notebooks\/issues\/10) on datasets 1.2.1, and it seems to be an issue with the datasets package.\r\n\r\n```python\r\n>>> from datasets import load_metric\r\n>>> metric = load_metric(\"glue\", \"sst2\")\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/ext3\/miniconda3\/lib\/python3.8\/site-packages\/datasets-1.2.1-py3.8.egg\/datasets\/load.py\", line 502, in load_metric\r\n File \"\/ext3\/miniconda3\/lib\/python3.8\/site-packages\/datasets-1.2.1-py3.8.egg\/datasets\/load.py\", line 66, in import_main_class\r\n File \"\/ext3\/miniconda3\/lib\/python3.8\/importlib\/__init__.py\", line 127, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File \"\", line 1014, in _gcd_import\r\n File \"\", line 991, in _find_and_load\r\n File \"\", line 975, in _find_and_load_unlocked\r\n File \"\", line 671, in _load_unlocked\r\n File \"\", line 783, in exec_module\r\n File \"\", line 219, in _call_with_frames_removed\r\n File \"\/home\/ns4008\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/glue\/e4606ab9804a36bcd5a9cebb2cb65bb14b6ac78ee9e6d5981fa679a495dd55de\/glue.py\", line 105, in \r\n @datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)\r\nAttributeError: module 'datasets.utils.file_utils' has no attribute 'add_start_docstrings'\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2213\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213","id":856025320,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzNjcwODk2","number":2213,"title":"Fix lc_quad download checksum","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-12T14:16:59Z","updated_at":"2021-04-14T22:04:54Z","closed_at":"2021-04-14T13:42:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2213","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2213.patch"},"body":"Fixes #2211 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2212\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2212","id":855999133,"node_id":"MDU6SXNzdWU4NTU5OTkxMzM=","number":2212,"title":"Can't reach \"https:\/\/storage.googleapis.com\/illuin\/fquad\/train.json.zip\" when trying to load fquad 
dataset","user":{"login":"hanss0n","id":21348833,"node_id":"MDQ6VXNlcjIxMzQ4ODMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21348833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hanss0n","html_url":"https:\/\/github.com\/hanss0n","followers_url":"https:\/\/api.github.com\/users\/hanss0n\/followers","following_url":"https:\/\/api.github.com\/users\/hanss0n\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hanss0n\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hanss0n\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hanss0n\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hanss0n\/orgs","repos_url":"https:\/\/api.github.com\/users\/hanss0n\/repos","events_url":"https:\/\/api.github.com\/users\/hanss0n\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hanss0n\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-04-12T13:49:56Z","updated_at":"2021-05-17T22:17:06Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I'm trying to load the [fquad dataset](https:\/\/huggingface.co\/datasets\/fquad) by running: \r\n\r\n```Python\r\nfquad = load_dataset(\"fquad\")\r\n```\r\n\r\nwhich produces the following error:\r\n\r\n```\r\nUsing custom data configuration default\r\n\r\nDownloading and preparing dataset fquad\/default (download: 3.14 MiB, generated: 6.62 MiB, post-processed: Unknown size, total: 9.76 MiB) to \/root\/.cache\/huggingface\/datasets\/fquad\/default\/0.1.0\/778dc2c85813d05ddd0c17087294d5f8f24820752340958070876b677af9f061...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nConnectionError Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 fquad = load_dataset(\"fquad\")\r\n\r\n11 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n 614 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n 615 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n--> 616 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n 617 \r\n 618 # Try a second time\r\n\r\nConnectionError: Couldn't reach https:\/\/storage.googleapis.com\/illuin\/fquad\/train.json.zip\r\n```\r\n\r\nDoes anyone know why that is and how to fix it? 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2211\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2211","id":855988410,"node_id":"MDU6SXNzdWU4NTU5ODg0MTA=","number":2211,"title":"Getting checksum error when trying to load lc_quad dataset","user":{"login":"hanss0n","id":21348833,"node_id":"MDQ6VXNlcjIxMzQ4ODMz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21348833?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hanss0n","html_url":"https:\/\/github.com\/hanss0n","followers_url":"https:\/\/api.github.com\/users\/hanss0n\/followers","following_url":"https:\/\/api.github.com\/users\/hanss0n\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hanss0n\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hanss0n\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hanss0n\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hanss0n\/orgs","repos_url":"https:\/\/api.github.com\/users\/hanss0n\/repos","events_url":"https:\/\/api.github.com\/users\/hanss0n\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hanss0n\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-12T13:38:58Z","updated_at":"2021-04-14T13:42:25Z","closed_at":"2021-04-14T13:42:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I'm having issues loading the [lc_quad](https:\/\/huggingface.co\/datasets\/fquad) dataset by running:\r\n\r\n```Python\r\nlc_quad = load_dataset(\"lc_quad\")\r\n```\r\n\r\nwhich is giving me the following error:\r\n\r\n``` \r\nUsing custom data configuration default\r\n\r\nDownloading and preparing dataset lc_quad\/default (download: 3.69 MiB, generated: 19.77 MiB, post-processed: Unknown size, total: 23.46 MiB) to \/root\/.cache\/huggingface\/datasets\/lc_quad\/default\/2.0.0\/5a98fe174603f5dec6df07edf1c2b4d2317210d2ad61f5a393839bca4d64e5a7...\r\n\r\n---------------------------------------------------------------------------\r\n\r\nNonMatchingChecksumError Traceback (most recent call last)\r\n\r\n in ()\r\n----> 1 lc_quad = load_dataset(\"lc_quad\")\r\n\r\n3 frames\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/info_utils.py in verify_checksums(expected_checksums, recorded_checksums, verification_name)\r\n 37 if len(bad_urls) > 0:\r\n 38 error_msg = \"Checksums didn't match\" + for_verification_name + \":\\n\"\r\n---> 39 raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\n 40 logger.info(\"All the checksums matched successfully\" + for_verification_name)\r\n 41 \r\n\r\nNonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/github.com\/AskNowQA\/LC-QuAD2.0\/archive\/master.zip']\r\n```\r\n\r\nDoes anyone know why this could be and how I fix it? 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2210\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2210","id":855709400,"node_id":"MDU6SXNzdWU4NTU3MDk0MDA=","number":2210,"title":"dataloading slow when using HUGE dataset","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-12T08:33:02Z","updated_at":"2021-04-13T02:03:05Z","closed_at":"2021-04-13T02:03:05Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nWhen I use datasets with 600GB data, the dataloading speed increases significantly. \r\nI am experimenting with two datasets, and one is about 60GB and the other 600GB.\r\nSimply speaking, my code uses `datasets.set_format(\"torch\")` function and let pytorch-lightning handle ddp training.\r\nWhen looking at the pytorch-lightning supported profile of two different runs, I see that fetching a batch(`get_train_batch`) consumes an unreasonable amount of time when data is large. 
What could be the cause?\r\n\r\n* 60GB data\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 200.33 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nrun_training_epoch \t| 71.994 \t|1 \t| 71.994 \t| 35.937 \t|\r\nrun_training_batch \t| 0.64373 \t|100 \t| 64.373 \t| 32.133 \t|\r\noptimizer_step_and_closure_0 \t| 0.64322 \t|100 \t| 64.322 \t| 32.108 \t|\r\ntraining_step_and_backward \t| 0.61004 \t|100 \t| 61.004 \t| 30.452 \t|\r\nmodel_backward \t| 0.37552 \t|100 \t| 37.552 \t| 18.745 \t|\r\nmodel_forward \t| 0.22813 \t|100 \t| 22.813 \t| 11.387 \t|\r\ntraining_step \t| 0.22759 \t|100 \t| 22.759 \t| 11.361 \t|\r\nget_train_batch \t| 0.066385 \t|100 \t| 6.6385 \t| 3.3138 \t|\r\n```\r\n\r\n* 600GB data\r\n```\r\nAction \t| Mean duration (s)\t|Num calls \t| Total time (s) \t| Percentage % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nTotal \t| - \t|_ \t| 3285.6 \t| 100 % \t|\r\n------------------------------------------------------------------------------------------------------------------------------------\r\nrun_training_epoch \t| 1397.9 \t|1 \t| 1397.9 \t| 42.546 \t|\r\nrun_training_batch \t| 7.2596 \t|100 \t| 725.96 \t| 22.095 \t|\r\noptimizer_step_and_closure_0 \t| 7.2589 \t|100 \t| 725.89 \t| 22.093 \t|\r\ntraining_step_and_backward \t| 7.223 \t|100 \t| 722.3 \t| 21.984 \t|\r\nmodel_backward \t| 6.9662 \t|100 \t| 696.62 \t| 21.202 \t|\r\nget_train_batch \t| 6.322 \t|100 \t| 632.2 \t| 19.241 \t|\r\nmodel_forward \t| 0.24902 \t|100 \t| 24.902 \t| 0.75789 \t|\r\ntraining_step \t| 0.2485 \t|100 \t| 24.85 \t| 0.75633 \t|\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2209\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209","id":855638232,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzMzQwMTI2","number":2209,"title":"Add code of conduct to the 
project","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-12T07:16:14Z","updated_at":"2021-04-12T17:55:52Z","closed_at":"2021-04-12T17:55:52Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2209","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2209.patch"},"body":"Add code of conduct to the project and link it from README and CONTRIBUTING.\r\n\r\nThis was already done in `transformers`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2208\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208","id":855343835,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzMTAxMzMw","number":2208,"title":"Remove Python2 
leftovers","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-11T16:08:03Z","updated_at":"2021-04-14T22:05:36Z","closed_at":"2021-04-14T13:40:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2208","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2208.patch"},"body":"This PR removes Python2 leftovers since this project aims for Python3.6+ (and as of 2020 Python2 is no longer officially supported)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2207\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2207","id":855267383,"node_id":"MDU6SXNzdWU4NTUyNjczODM=","number":2207,"title":"making labels consistent across the 
datasets","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-11T10:03:56Z","updated_at":"2021-04-14T14:02:00Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nFor accessing the labels one can type \r\n```\r\n>>> a.features['label']\r\nClassLabel(num_classes=3, names=['entailment', 'neutral', 'contradiction'], names_file=None, id=None)\r\n```\r\nThe labels however are not consistent with the actual labels sometimes, for instance in case of XNLI, the actual labels are 0,1,2, but if one try to access as above they are entailment, neutral,contradiction,\r\nit would be great to have the labels consistent.\r\n\r\nthanks \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2206\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2206","id":855252415,"node_id":"MDU6SXNzdWU4NTUyNTI0MTU=","number":2206,"title":"Got pyarrow error when loading a dataset while adding special tokens into the 
tokenizer","user":{"login":"yana-xuyan","id":38536635,"node_id":"MDQ6VXNlcjM4NTM2NjM1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/38536635?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yana-xuyan","html_url":"https:\/\/github.com\/yana-xuyan","followers_url":"https:\/\/api.github.com\/users\/yana-xuyan\/followers","following_url":"https:\/\/api.github.com\/users\/yana-xuyan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yana-xuyan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yana-xuyan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yana-xuyan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yana-xuyan\/orgs","repos_url":"https:\/\/api.github.com\/users\/yana-xuyan\/repos","events_url":"https:\/\/api.github.com\/users\/yana-xuyan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yana-xuyan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-11T08:40:09Z","updated_at":"2021-04-14T06:06:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I added five more special tokens into the GPT2 tokenizer. But after that, when I try to pre-process the data using my previous code, I got an error shown below:\r\n\r\nTraceback (most recent call last):\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1687, in _map_single\r\n writer.write(example)\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 296, in write\r\n self.write_on_file()\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 270, in write_on_file\r\n pa_array = pa.array(typed_sequence)\r\n File \"pyarrow\/array.pxi\", line 222, in pyarrow.lib.array\r\n File \"pyarrow\/array.pxi\", line 110, in pyarrow.lib._handle_arrow_array_protocol\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/datasets\/arrow_writer.py\", line 108, in __arrow_array__\r\n out = out.cast(pa.list_(self.optimized_int_type))\r\n File \"pyarrow\/array.pxi\", line 810, in pyarrow.lib.Array.cast\r\n File \"\/home\/xuyan\/anaconda3\/envs\/convqa\/lib\/python3.7\/site-packages\/pyarrow\/compute.py\", line 281, in cast\r\n return call_function(\"cast\", [arr], options)\r\n File \"pyarrow\/_compute.pyx\", line 465, in pyarrow._compute.call_function\r\n File \"pyarrow\/_compute.pyx\", line 294, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Integer value 50259 not in range: -128 to 127\r\n\r\nDo you have any idea about it?","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2205\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205","id":855207605,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEzMDAwMzYw","number":2205,"title":"Updating citation information on LinCE readme","user":{"login":"gaguilar","id":5833357,"node_id":"MDQ6VXNlcjU4MzMzNTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5833357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gaguilar","html_url":"https:\/\/github.com\/gaguilar","followers_url":"https:\/\/api.github.com\/users\/gaguilar\/followers","following_url":"https:\/\/api.github.com\/users\/gaguilar\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gaguilar\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gaguilar\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gaguilar\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gaguilar\/orgs","repos_url":"https:\/\/api.github.com\/users\/gaguilar\/repos","events_url":"https:\/\/api.github.com\/users\/gaguilar\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gaguilar\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-11T03:18:05Z","updated_at":"2021-04-12T17:53:34Z","closed_at":"2021-04-12T17:53:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2205","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2205.patch"},"body":"Hi!\r\n\r\nI just updated the citation information in this PR. It had an additional bibtex from one of the datasets used in LinCE and then the LinCE bibtex. I removed the former and added a link that shows the full list of citations for each dataset. 
\r\n\r\nThanks!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2204\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204","id":855144431,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyOTU1MzM2","number":2204,"title":"Add configurable options to `seqeval` metric","user":{"login":"marrodion","id":44571847,"node_id":"MDQ6VXNlcjQ0NTcxODQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44571847?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/marrodion","html_url":"https:\/\/github.com\/marrodion","followers_url":"https:\/\/api.github.com\/users\/marrodion\/followers","following_url":"https:\/\/api.github.com\/users\/marrodion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/marrodion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/marrodion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/marrodion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/marrodion\/orgs","repos_url":"https:\/\/api.github.com\/users\/marrodion\/repos","events_url":"https:\/\/api.github.com\/users\/marrodion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/marrodion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-10T19:58:19Z","updated_at":"2021-04-15T13:49:46Z","closed_at":"2021-04-15T13:49:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2204","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2204.patch"},"body":"Fixes #2148\r\n\r\nAdds options to use strict mode, different schemes of evaluation, sample weight and adjust zero_division behavior, if encountered.\r\n\r\n`seqeval` provides schemes as objects, hence dynamic import from string, to avoid making the user do the import (thanks to @albertvillanova for the `importlib` idea).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2203\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203","id":855053595,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyODg4MzA5","number":2203,"title":"updated banking77 train and test 
data","user":{"login":"hsali","id":6765330,"node_id":"MDQ6VXNlcjY3NjUzMzA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6765330?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hsali","html_url":"https:\/\/github.com\/hsali","followers_url":"https:\/\/api.github.com\/users\/hsali\/followers","following_url":"https:\/\/api.github.com\/users\/hsali\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hsali\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hsali\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hsali\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hsali\/orgs","repos_url":"https:\/\/api.github.com\/users\/hsali\/repos","events_url":"https:\/\/api.github.com\/users\/hsali\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hsali\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-10T12:10:10Z","updated_at":"2021-04-23T14:33:39Z","closed_at":"2021-04-23T14:33:39Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2203","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2203.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2202\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202","id":854501109,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyNDM2ODMx","number":2202,"title":"Add classes GenerateMode, DownloadConfig and Version to the 
documentation","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-09T12:58:19Z","updated_at":"2021-04-12T17:58:00Z","closed_at":"2021-04-12T17:57:59Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2202","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2202.patch"},"body":"Add documentation for classes `GenerateMode`, `DownloadConfig` and `Version`.\r\n\r\nUpdate the docstring of `load_dataset` to create cross-reference links to the classes.\r\n\r\nRelated to #2187.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2201\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201","id":854499563,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyNDM1NTE3","number":2201,"title":"Fix ArrowWriter overwriting features in 
ArrowBasedBuilder","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-09T12:56:19Z","updated_at":"2021-04-12T13:32:17Z","closed_at":"2021-04-12T13:32:16Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2201","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2201.patch"},"body":"This should fix the issues with CSV loading experienced in #2153 and #2200.\r\n\r\nThe CSV builder is an ArrowBasedBuilder that had an issue with its ArrowWriter used to write the arrow file from the csv data.\r\nThe writer wasn't initialized with the features passed by the user. 
Therefore the writer was inferring the features from the arrow data, discarding the features passed by the user.\r\n\r\nI fixed that and I updated the tests","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2200\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2200","id":854449656,"node_id":"MDU6SXNzdWU4NTQ0NDk2NTY=","number":2200,"title":"_prepare_split will overwrite DatasetBuilder.info.features","user":{"login":"Gforky","id":4157614,"node_id":"MDQ6VXNlcjQxNTc2MTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/4157614?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Gforky","html_url":"https:\/\/github.com\/Gforky","followers_url":"https:\/\/api.github.com\/users\/Gforky\/followers","following_url":"https:\/\/api.github.com\/users\/Gforky\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Gforky\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Gforky\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Gforky\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Gforky\/orgs","repos_url":"https:\/\/api.github.com\/users\/Gforky\/repos","events_url":"https:\/\/api.github.com\/users\/Gforky\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Gforky\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\
/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-04-09T11:47:13Z","updated_at":"2021-06-04T10:37:35Z","closed_at":"2021-06-04T10:37:35Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, here is my issue:\r\nI initialized a Csv datasetbuilder with specific features:\r\n```\r\ndef get_dataset_features(data_args):\r\n features = {}\r\n if data_args.text_features:\r\n features.update({text_feature: hf_features.Value(\"string\") for text_feature in data_args.text_features.strip().split(\",\")})\r\n if data_args.num_features:\r\n features.update({text_feature: hf_features.Value(\"float32\") for text_feature in data_args.num_features.strip().split(\",\")})\r\n if data_args.label_classes:\r\n features[\"label\"] = hf_features.ClassLabel(names=data_args.label_classes.strip().split(\",\"))\r\n else:\r\n features[\"label\"] = hf_features.Value(\"float32\")\r\n return hf_features.Features(features)\r\n\r\ndatasets = load_dataset(extension,\r\n data_files=data_files,\r\n sep=data_args.delimiter,\r\n header=data_args.header,\r\n column_names=data_args.column_names.split(\",\") if data_args.column_names else None,\r\n features=get_dataset_features(data_args=data_args))\r\n```\r\nThe `features` is printout as below before `builder_instance.as_dataset` is called:\r\n```\r\n{'label': ClassLabel(num_classes=2, names=['unacceptable', 'acceptable'], names_file=None, id=None), 'notated': Value(dtype='string', id=None), 'sentence': Value(dtype='string', id=None), 'src_code': Value(dtype='string', id=None)}\r\n````\r\n\r\nBut after the `builder_instance.as_dataset` is called for Csv dataset builder, the `features` is changed to:\r\n```\r\n{'label': Value(dtype='int64', id=None), 'notated': Value(dtype='string', id=None), 'sentence': Value(dtype='string', id=None), 'src_code': Value(dtype='string', id=None)}\r\n```\r\n\r\nAfter digged into the code, I releazed that in `ArrowBasedBuilder._prepare_split`, the DatasetBuilder's info's features will be overwrited by `ArrowWriter`'s `_features`. 
\r\nBut `ArrowWriter` is initailized without passing `features`.\r\nSo my concern is:\r\nIt's this overwrite must be done, or, should it be an option to pass features in `_prepare_split` function?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2199\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199","id":854417318,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyMzY0ODU3","number":2199,"title":"Fix backward compatibility in Dataset.load_from_disk","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-09T11:01:10Z","updated_at":"2021-04-09T15:57:05Z","closed_at":"2021-04-09T15:57:05Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2199","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2199.patch"},"body":"Fix backward compatibility when loading from disk an old dataset saved to disk with indices using key \"_indices_data_files\".\r\n\r\nRelated to #2195.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2198\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198","id":854357481,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyMzE0MTIz","number":2198,"title":"added file_permission in 
load_dataset","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-09T09:39:06Z","updated_at":"2021-04-16T14:11:46Z","closed_at":"2021-04-16T14:11:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2198","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2198.patch"},"body":"As discussed in #2065 I've added `file_permission` argument in `load_dataset`. \r\n\r\nAdded mainly 2 things here:\r\n1) Permission of downloaded datasets when converted to .arrow files can be changed with argument `file_permission` argument in `load_dataset` (default is 0o644 only)\r\n2) Incase the user uses `map` later on to generate another cache file of dataset, it ensures the permissions of newly generated file are similar to that of` *-train.arrow` file inside cache_dir for that dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2197\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197","id":854356559,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEyMzEzMzQw","number":2197,"title":"fix missing indices_files in 
load_form_disk","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-09T09:37:57Z","updated_at":"2021-04-09T09:54:40Z","closed_at":"2021-04-09T09:54:39Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2197","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2197.patch"},"body":"This should fix #2195\r\n\r\n`load_from_disk` was failing if there was no \"_indices_files\" field in state.json. This can happen if the dataset has no indices mapping","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2196\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2196","id":854126114,"node_id":"MDU6SXNzdWU4NTQxMjYxMTQ=","number":2196,"title":"`load_dataset` caches two arrow 
files?","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-09T03:49:19Z","updated_at":"2021-04-12T05:25:29Z","closed_at":"2021-04-12T05:25:29Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nI am using datasets to load large json file of 587G.\r\nI checked the cached folder and found that there are two arrow files created:\r\n* `cache-ed205e500a7dc44c.arrow` - 355G\r\n* `json-train.arrow` - 582G\r\n\r\nWhy is the first file created?\r\nIf I delete it, would I still be able to `load_from_disk`?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2195\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2195","id":854070194,"node_id":"MDU6SXNzdWU4NTQwNzAxOTQ=","number":2195,"title":"KeyError: '_indices_files' in 
`arrow_dataset.py`","user":{"login":"samsontmr","id":15007950,"node_id":"MDQ6VXNlcjE1MDA3OTUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15007950?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/samsontmr","html_url":"https:\/\/github.com\/samsontmr","followers_url":"https:\/\/api.github.com\/users\/samsontmr\/followers","following_url":"https:\/\/api.github.com\/users\/samsontmr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/samsontmr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/samsontmr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/samsontmr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/samsontmr\/orgs","repos_url":"https:\/\/api.github.com\/users\/samsontmr\/repos","events_url":"https:\/\/api.github.com\/users\/samsontmr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/samsontmr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-09T01:37:12Z","updated_at":"2021-04-09T09:55:09Z","closed_at":"2021-04-09T09:54:39Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"After pulling the latest master, I'm getting a crash when `load_from_disk` tries to load my local dataset.\r\n\r\nTrace:\r\n```\r\nTraceback (most recent call last):\r\n File \"load_data.py\", line 11, in \r\n dataset = load_from_disk(SRC)\r\n File \"\/opt\/conda\/envs\/py38\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 784, in load_from_disk\r\n return DatasetDict.load_from_disk(dataset_path, fs, keep_in_memory=keep_in_memory)\r\n File \"\/opt\/conda\/envs\/py38\/lib\/python3.8\/site-packages\/datasets\/dataset_dict.py\", line 692, in load_from_disk\r\n dataset_dict[k] = Dataset.load_from_disk(dataset_dict_split_path, fs, keep_in_memory=keep_in_memory)\r\n File \"\/opt\/conda\/envs\/py38\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 634, in load_from_disk\r\n if state[\"_indices_files\"]:\r\nKeyError: '_indices_files'\r\n```\r\n\r\nI believe this is the line causing the error since there may not be a `_indices_files` key in the older versions:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/b70141e3c5149430951773aaa0155555c5fb3e76\/src\/datasets\/arrow_dataset.py#L634\r\n\r\nMay I suggest using `state.get()` instead of directly indexing the dictionary?\r\n\r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2194\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2194","id":853909452,"node_id":"MDU6SXNzdWU4NTM5MDk0NTI=","number":2194,"title":"py3.7: TypeError: can't pickle _LazyModule 
objects","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-08T21:02:48Z","updated_at":"2021-04-09T16:56:50Z","closed_at":"2021-04-09T01:52:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"While this works fine with py3.8, under py3.7, with a totally new conda env and transformers install:\r\n\r\n```\r\ngit clone https:\/\/github.com\/huggingface\/transformers\r\ncd transformers\r\npip install -e .[testing]\r\n\r\nexport BS=1; rm -rf \/tmp\/test-clm; PYTHONPATH=src USE_TF=0 CUDA_VISIBLE_DEVICES=0 python \\\r\nexamples\/language-modeling\/run_clm.py --model_name_or_path distilgpt2 --dataset_name wikitext \\\r\n--dataset_config_name wikitext-2-raw-v1 --do_train --max_train_samples 1 \\\r\n--per_device_train_batch_size $BS --output_dir \/tmp\/test-clm --block_size 128 --logging_steps 1 \\\r\n--fp16\r\n```\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"examples\/language-modeling\/run_clm.py\", line 453, in \r\n main()\r\n File \"examples\/language-modeling\/run_clm.py\", line 336, in main\r\n load_from_cache_file=not data_args.overwrite_cache,\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 303, in map\r\n for k, dataset in self.items()\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 303, in \r\n for k, dataset in self.items()\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1259, in map\r\n update_data=update_data,\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 157, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 158, in wrapper\r\n self._fingerprint, transform, kwargs_for_fingerprint\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 105, in update_fingerprint\r\n hasher.update(transform_args[key])\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 57, in update\r\n self.m.update(self.hash(value).encode(\"utf-8\"))\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 53, in hash\r\n return cls.hash_default(value)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 46, in hash_default\r\n 
return cls.hash_bytes(dumps(value))\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 389, in dumps\r\n dump(obj, file)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 361, in dump\r\n Pickler(file, recurse=True).dump(obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 454, in dump\r\n StockPickler.dump(self, obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 437, in dump\r\n self.save(obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/datasets\/utils\/py_utils.py\", line 556, in save_function\r\n obj=obj,\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/home\/stas\/anaconda3\/lib\/python3.7\/pickle.py\", line 524, in save\r\n rv = reduce(self.proto)\r\nTypeError: can't pickle _LazyModule objects\r\n```\r\n```\r\n$ python --version\r\nPython 3.7.4\r\n\r\n$ python -m torch.utils.collect_env\r\nCollecting environment information...\r\nPyTorch version: 1.8.0.dev20210110+cu110\r\nIs debug build: False\r\nCUDA used to build PyTorch: 11.0\r\nROCM used to build PyTorch: N\/A\r\n\r\nOS: Ubuntu 20.04.2 LTS (x86_64)\r\nGCC version: (Ubuntu 9.3.0-17ubuntu1~20.04) 9.3.0\r\nClang version: 10.0.0-4ubuntu1 \r\nCMake version: version 3.16.3\r\n```\r\n\r\nThanks.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2193\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2193","id":853725707,"node_id":"MDU6SXNzdWU4NTM3MjU3MDc=","number":2193,"title":"Filtering\/mapping on one column is very 
slow","user":{"login":"norabelrose","id":39116809,"node_id":"MDQ6VXNlcjM5MTE2ODA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39116809?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/norabelrose","html_url":"https:\/\/github.com\/norabelrose","followers_url":"https:\/\/api.github.com\/users\/norabelrose\/followers","following_url":"https:\/\/api.github.com\/users\/norabelrose\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/norabelrose\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/norabelrose\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/norabelrose\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/norabelrose\/orgs","repos_url":"https:\/\/api.github.com\/users\/norabelrose\/repos","events_url":"https:\/\/api.github.com\/users\/norabelrose\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/norabelrose\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2021-04-08T18:16:14Z","updated_at":"2021-04-26T16:13:59Z","closed_at":"2021-04-26T16:13:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I'm currently using the `wikipedia` dataset\u2014 I'm tokenizing the articles with the `tokenizers` library using `map()` and also adding a new `num_tokens` column to the dataset as part of that map operation.\r\n\r\nI want to be able to _filter_ the dataset based on this `num_tokens` column, but even when I specify `input_columns=['num_tokens']`, it seems that the entirety of each row is loaded into memory, which makes the operation take much longer than it should. Indeed, `filter` currently just calls `map`, and I found that in `_map_single` on lines 1690-1704 of `arrow_dataset.py`, the method is just grabbing slices of _all the rows_ of the dataset and then passing only the specified columns to the map function. It seems that, when the user passes a value for `input_columns`, the `map` function should create a temporary pyarrow table by selecting just those columns, and then get slices from that table. Or something like that\u2014 I'm not very familiar with the pyarrow API.\r\n\r\nI know that in the meantime I can sort of get around this by simply only returning the rows that match my filter criterion from the tokenizing function I pass to `map()`, but I actually _also_ want to map on just the `num_tokens` column in order to compute batches with a roughly uniform number of tokens per batch. 
I would also ideally like to be able to change my minimum and maximum article lengths without having to re-tokenize the entire dataset.\r\n\r\nPS: This is definitely not a \"dataset request.\" I'm realizing that I don't actually know how to remove labels from my own issues on other people's repos, if that is even possible.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2192\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192","id":853547910,"node_id":"MDExOlB1bGxSZXF1ZXN0NjExNjE5NTY0","number":2192,"title":"Fix typo in huggingface hub","user":{"login":"LysandreJik","id":30755778,"node_id":"MDQ6VXNlcjMwNzU1Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/30755778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LysandreJik","html_url":"https:\/\/github.com\/LysandreJik","followers_url":"https:\/\/api.github.com\/users\/LysandreJik\/followers","following_url":"https:\/\/api.github.com\/users\/LysandreJik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LysandreJik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LysandreJik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LysandreJik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LysandreJik\/orgs","repos_url":"https:\/\/api.github.com\/users\/LysandreJik\/repos","events_url":"https:\/\/api.github.com\/users\/LysandreJik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LysandreJik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-08T14:42:24Z","updated_at":"2021-04-08T15:47:41Z","closed_at":"2021-04-08T15:47:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2192","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2192.patch"},"body":"pip knows how to resolve to `huggingface_hub`, but conda doesn't!\r\n\r\nThe `packaging` dependency is also required for the build to complete.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2191\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191","id":853364204,"node_id":"MDExOlB1bGxSZXF1ZXN0NjExNDY1Nzc0","number":2191,"title":"Refactorize tests to use Dataset as context 
manager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":4,"created_at":"2021-04-08T11:21:04Z","updated_at":"2021-04-19T07:53:11Z","closed_at":"2021-04-19T07:53:10Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2191","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2191.patch"},"body":"Refactorize Dataset 
tests to use Dataset as context manager.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2190\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2190","id":853181564,"node_id":"MDU6SXNzdWU4NTMxODE1NjQ=","number":2190,"title":"News_commentary Dataset Translation Pairs are of Incorrect Language Specified Pairs","user":{"login":"anassalamah","id":8571003,"node_id":"MDQ6VXNlcjg1NzEwMDM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8571003?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anassalamah","html_url":"https:\/\/github.com\/anassalamah","followers_url":"https:\/\/api.github.com\/users\/anassalamah\/followers","following_url":"https:\/\/api.github.com\/users\/anassalamah\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anassalamah\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anassalamah\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anassalamah\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anassalamah\/orgs","repos_url":"https:\/\/api.github.com\/users\/anassalamah\/repos","events_url":"https:\/\/api.github.com\/users\/anassalamah\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anassalamah\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-08T07:53:43Z","updated_at":"2021-05-24T10:03:55Z","closed_at":"2021-05-24T10:03:55Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I used load_dataset to load the news_commentary dataset for \"ar-en\" translation pairs but found translations from Arabic to Hindi. 
\r\n\r\n```\r\ntrain_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[:98%]')\r\nval_ds = load_dataset(\"news_commentary\", \"ar-en\", split='train[98%:]')\r\n\r\n# filtering out examples that are not ar-en translations but ar-hi\r\nval_ds = val_ds.filter(lambda example, indice: indice not in chain(range(1312,1327) ,range(1384,1399), range(1030,1042)), with_indices=True)\r\n```\r\n\r\n* I'm fairly new to using datasets so I might be doing something wrong","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2189\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2189","id":853052891,"node_id":"MDU6SXNzdWU4NTMwNTI4OTE=","number":2189,"title":"save_to_disk doesn't work when we use concatenate_datasets function before creating the final dataset_object.","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-08T04:42:53Z","updated_at":"2021-04-14T13:57:05Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"As you can see, it saves the entire dataset.\r\n\r\n@lhoestq \r\n\r\nYou can check by going through the following example,\r\n\r\n```\r\nfrom datasets import load_from_disk,concatenate_datasets\r\n\r\nloaded_data=load_from_disk('\/home\/gsir059\/HNSW-ori\/my_knowledge_dataset')\r\nn=20\r\nkb_list=[loaded_data.shard(n, i, contiguous=True) for i in range(n)]\r\nfinal_dataset=concatenate_datasets([kb_list[1],kb_list[2]])\r\nfinal_dataset.save_to_disk('\/home\/gsir059\/haha\/k.arrow')\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2188\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2188","id":853044166,"node_id":"MDU6SXNzdWU4NTMwNDQxNjY=","number":2188,"title":"Duplicate data in Timit dataset","user":{"login":"BHM-RB","id":78190188,"node_id":"MDQ6VXNlcjc4MTkwMTg4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/78190188?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BHM-RB","html_url":"https:\/\/github.com\/BHM-RB","followers_url":"https:\/\/api.github.com\/users\/BHM-RB\/followers","following_url":"https:\/\/api.github.com\/users\/BHM-RB\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BHM-RB\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BHM-RB\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BHM-RB\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BHM-RB\/orgs","repos_url":"https:\/\/api.github.com\/users\/BHM-RB\/repos","events_url":"https:\/\/api.github.com\/users\/BHM-RB\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BHM-RB\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-08T04:21:54Z","updated_at":"2021-04-08T12:13:19Z","closed_at":"2021-04-08T12:13:19Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I ran a simple code to list all texts in Timit dataset and the texts were all the same.\r\nIs this dataset corrupted?\r\n**Code:**\r\ntimit = load_dataset(\"timit_asr\")\r\nprint(*timit['train']['text'], sep='\\n')\r\n**Result:**\r\nWould such an act of refusal be useful?\r\nWould such an act of refusal be useful?\r\nWould such an act of refusal be useful?\r\nWould such an act of refusal be useful?\r\n...\r\n...\r\nWould such an act of refusal be useful?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2187\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2187","id":852939736,"node_id":"MDU6SXNzdWU4NTI5Mzk3MzY=","number":2187,"title":"Question (potential issue?) 
related to datasets caching","user":{"login":"ioana-blue","id":17202292,"node_id":"MDQ6VXNlcjE3MjAyMjky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17202292?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ioana-blue","html_url":"https:\/\/github.com\/ioana-blue","followers_url":"https:\/\/api.github.com\/users\/ioana-blue\/followers","following_url":"https:\/\/api.github.com\/users\/ioana-blue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ioana-blue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ioana-blue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ioana-blue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ioana-blue\/orgs","repos_url":"https:\/\/api.github.com\/users\/ioana-blue\/repos","events_url":"https:\/\/api.github.com\/users\/ioana-blue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ioana-blue\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":12,"created_at":"2021-04-08T00:16:28Z","updated_at":"2021-04-14T14:55:58Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I thought I had disabled datasets caching in my code, as follows:\r\n```\r\nfrom datasets import set_caching_enabled\r\n...\r\ndef main():\r\n\r\n # disable caching in datasets\r\n set_caching_enabled(False)\r\n```\r\nHowever, in my log files I see messages like the following:\r\n\r\n```\r\n04\/07\/2021 18:34:42 - WARNING - datasets.builder - Using custom data configuration default-888a87931cbc5877\r\n04\/07\/2021 18:34:42 - WARNING - datasets.builder - Reusing dataset csv (xxxx\/cache-transformers\/datasets\/csv\/default-888a87931cbc5877\/0.0.0\/965b6429be0fc05f975b608ce64e1fa941cc8fb4f30629b523d2390f3c0e1a93\r\n```\r\nCan you please let me know what this reusing dataset csv means? I wouldn't expect any reusing with the datasets caching disabled. 
Thank you!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2186\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186","id":852840819,"node_id":"MDExOlB1bGxSZXF1ZXN0NjExMDMxNzE0","number":2186,"title":"GEM: new challenge sets","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-07T21:39:07Z","updated_at":"2021-04-07T21:56:35Z","closed_at":"2021-04-07T21:56:35Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2186","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2186.patch"},"body":"This PR updates the GEM dataset to:\r\n- remove extraneous fields in WikiAuto after https:\/\/github.com\/huggingface\/datasets\/pull\/2171 fixed the source\r\n- add context and services to Schema Guided Dialog\r\n- Add new or update challenge sets for MLSUM ES and DE, XSUM, and SGD","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2185\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2185","id":852684395,"node_id":"MDU6SXNzdWU4NTI2ODQzOTU=","number":2185,"title":".map() and distributed 
training","user":{"login":"VictorSanh","id":16107619,"node_id":"MDQ6VXNlcjE2MTA3NjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16107619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/VictorSanh","html_url":"https:\/\/github.com\/VictorSanh","followers_url":"https:\/\/api.github.com\/users\/VictorSanh\/followers","following_url":"https:\/\/api.github.com\/users\/VictorSanh\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/VictorSanh\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/VictorSanh\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/VictorSanh\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/VictorSanh\/orgs","repos_url":"https:\/\/api.github.com\/users\/VictorSanh\/repos","events_url":"https:\/\/api.github.com\/users\/VictorSanh\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/VictorSanh\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-07T18:22:14Z","updated_at":"2021-04-09T15:38:31Z","closed_at":"2021-04-09T15:38:31Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\nI have a question regarding distributed training and the `.map` call on a dataset.\r\n\r\nI have a local dataset \"my_custom_dataset\" that I am loading with `datasets = load_from_disk(dataset_path=my_path)`.\r\n`dataset` is then tokenized:\r\n```python\r\ndatasets = load_from_disk(dataset_path=my_path)\r\n\r\n[...]\r\n\r\ndef tokenize_function(examples):\r\n return tokenizer(examples[text_column_name])\r\n\r\nlogger.info(\"Mapping dataset to tokenized dataset.\")\r\ntokenized_datasets = datasets.map(\r\n tokenize_function,\r\n batched=True,\r\n num_proc=preprocessing_num_workers,\r\n remove_columns=column_names,\r\n load_from_cache_file=True,\r\n)\r\n```\r\nI am using 31 workers (`preprocessing_num_workers=31`) and thus it creates 31 `cache*.arrow` files in `my_path\/train` (there is only a train split).\r\nWhen I relaunch the script, the map is tokenization is skipped in favor of loading the 31 previously cached files, and that's perfect.\r\n\r\nEverything so far was done by launching a **single process script**.\r\nI now launch the same training script in **distributed mode** (`pytorch -m torch.distributed.launch --nproc_per_node 2`). However, once it reaches the map call, it re-does the tokenization... instead of loading the 31 cached files. 
\r\n\r\nI tried adding the `cache_file_name` argument: `cache_file_name={\"train\": my_path\/one_of_the_arrow_file}`, but I can't give the 31 cached files, so it probably isn't the right way to do it.\r\n\r\n**My question: what is the best way to load cached files if they were pre-processed and dumped in multiple arrow files?** It seems automatically handled for single processes but fails on distributed training.\r\n\r\n- I am following the same structure as the examples of transformers (more specifically [run_clm.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_clm.py) in my case)\r\n- I am using 1.5.0 version of datasets if that matters.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2184\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184","id":852597258,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwODIxMTc0","number":2184,"title":"Implementation of class_encode_column","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-07T16:47:43Z","updated_at":"2021-04-16T11:44:37Z","closed_at":"2021-04-16T11:26:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2184","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2184.patch"},"body":"Addresses #2176 \r\n\r\nI'm happy to discuss the API and internals!","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2183\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183","id":852518411,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNzU3MjUz","number":2183,"title":"Fix s3fs tests for py36 and py37+","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-07T15:17:11Z","updated_at":"2021-04-08T08:54:45Z","closed_at":"2021-04-08T08:54:44Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2183","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2183.patch"},"body":"Recently several changes happened:\r\n1. latest versions of `fsspec` require python>3.7 for async features\r\n2. 
`s3fs` added a dependency on `aiobotocore`, which is not compatible with the `moto` s3 mock context manager\r\n\r\nThis PR fixes both issues, by pinning `fsspec` and `s3fs` for python 3.6, and by using `moto` in server mode to support running the tests on python>=3.7 with the latest version of `fsspec` and `s3fs`.\r\n\r\ncc @philschmid ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2182\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182","id":852384872,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNjQ2MDIy","number":2182,"title":"Set default in-memory value depending on the dataset size","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":4,"created_at":"2021-04-07T13:00:18Z","updated_at":"2021-04-20T14:20:12Z","closed_at":"2021-04-20T10:04:04Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2182","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2182.patch"},"body":"Set a default value for `in_memory` depending on the size of the dataset to be loaded.\r\n\r\nClose #2179.\r\n\r\nTODO:\r\n- [x] Add a section in the docs about this.\r\n- ~Add a warning if someone tries to specify `cache_file_name=` in `map`, `filter` etc. 
on a dataset that is in memory, since the computation is not going to be cached in this case.~","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2181\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2181","id":852261607,"node_id":"MDU6SXNzdWU4NTIyNjE2MDc=","number":2181,"title":"Error when loading a HUGE json file (pyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries)","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-04-07T10:26:46Z","updated_at":"2021-04-12T07:15:55Z","closed_at":"2021-04-12T07:15:55Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, thanks for the great library. 
I have used the brilliant library for a couple of small projects, and now using it for a fairly big project.\r\nWhen loading a huge json file of 500GB, pyarrow complains as follows:\r\n```\r\nTraceback (most recent call last):\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 531, in incomplete_dir\r\n yield tmp_dir\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 573, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 650, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1027, in _prepare_split\r\n for key, table in utils.tqdm(generator, unit=\" tables\", leave=False, disable=not_verbose):\r\n File \"\/home\/user\/.pyenv\/versions\/3.7.9\/lib\/python3.7\/site-packages\/tqdm\/std.py\", line 1133, in __iter__\r\n for obj in iterable:\r\n File \"\/app\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/json\/9498524fd296a6cca99c66d6c5be507d1c0991f5a814e535b507f4a66096a641\/json.py\", line 83, in _generate_tables\r\n parse_options=self.config.pa_parse_options,\r\n File \"pyarrow\/_json.pyx\", line 247, in pyarrow._json.read_json\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: straddling object straddles two block boundaries (try to increase block size?)\r\n```\r\nWhen using only a small portion of the sample file, say first 100 lines, it works perfectly well..\r\n\r\nI see that it is the error from pyarrow, but could you give me a hint or possible solutions?\r\n#369 describes the same error and #372 claims to have fixed the issue, but I have no clue why I am still getting this one. 
Thanks in advance!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2180\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180","id":852258635,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNTQxOTA2","number":2180,"title":"Add tel to xtreme tatoeba","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-07T10:23:15Z","updated_at":"2021-04-07T15:50:35Z","closed_at":"2021-04-07T15:50:34Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2180","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2180.patch"},"body":"This should fix issue #2149 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2179\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2179","id":852237957,"node_id":"MDU6SXNzdWU4NTIyMzc5NTc=","number":2179,"title":"Load small datasets in-memory instead of using memory 
map","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/us
ers\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-04-07T09:58:16Z","updated_at":"2021-04-20T10:04:04Z","closed_at":"2021-04-20T10:04:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Currently all datasets are loaded using memory mapping by default in `load_dataset`.\r\nHowever this might not be necessary for small datasets. If a dataset is small enough, then it can be loaded in-memory and:\r\n- its memory footprint would be small so it's ok\r\n- in-memory computations\/queries would be faster\r\n- the caching on-disk would be disabled, making computations even faster (no I\/O bound because of the disk)\r\n- but running the same computation a second time would recompute everything since there would be no cached results on-disk. But this is probably fine since computations would be fast anyway + users should be able to provide a cache filename if needed.\r\n\r\nTherefore, maybe the default behavior of `load_dataset` should be to load small datasets in-memory and big datasets using memory mapping.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2178\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178","id":852215058,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwNTA1Mjg1","number":2178,"title":"Fix cast memory usage by using map on subtables","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":3,"created_at":"2021-04-07T09:30:50Z","updated_at":"2021-04-20T14:20:44Z","closed_at":"2021-04-13T09:28:16Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2178","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2178.patch"},"body":"The `cast` operation on a pyarrow Table may create new arrays in memory.\r\nThis is an issue since users expect memory mapped datasets to not fill up the RAM.\r\n\r\nTo fix that I used `map` to write a new arrow file on disk when cast is used.\r\nTo make things more convenient I introduced the `arrow` formatting of a dataset, to make it return pyarrow tables instead of python dicts. 
This way one can use pyarrow transforms directly when using `map`.\r\n\r\nedit: we'll use the same mechanism for `filter`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2177\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177","id":852065307,"node_id":"MDExOlB1bGxSZXF1ZXN0NjEwMzc5MDYx","number":2177,"title":"add social thumbnial","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-07T06:40:06Z","updated_at":"2021-04-07T08:16:01Z","closed_at":"2021-04-07T08:16:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2177","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2177.patch"},"body":"# What does this PR do?\r\n\r\nI added OpenGraph\/ Twitter Card support to the docs to create nice social thumbnails.\r\n\r\n![Bildschirmfoto 2021-04-07 um 08 36 50](https:\/\/user-images.githubusercontent.com\/32632186\/113821698-bac2ce80-977c-11eb-81aa-d8f16355857e.png)\r\n\r\nTo be able to add these I needed to install `sphinxext-opengraph`. I came across this [issue](https:\/\/github.com\/readthedocs\/readthedocs.org\/issues\/1758) on the readthedocs repo saying that since someone has built this plugin they are not integrating and providing documentation to it. That's why I added it for creating the documentation. The repository can be found [here](https:\/\/github.com\/wpilibsuite\/sphinxext-opengraph\/tree\/main).\r\n\r\nP.S. It seemed that `make style` never ran for `docs\/` i hope the changes are okay otherwise I'll revert it. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2176\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2176","id":851865795,"node_id":"MDU6SXNzdWU4NTE4NjU3OTU=","number":2176,"title":"Converting a Value to a ClassLabel","user":{"login":"nelson-liu","id":7272031,"node_id":"MDQ6VXNlcjcyNzIwMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7272031?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nelson-liu","html_url":"https:\/\/github.com\/nelson-liu","followers_url":"https:\/\/api.github.com\/users\/nelson-liu\/followers","following_url":"https:\/\/api.github.com\/users\/nelson-liu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nelson-liu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nelson-liu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nelson-liu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nelson-liu\/orgs","repos_url":"https:\/\/api.github.com\/users\/nelson-liu\/repos","events_url":"https:\/\/api.github.com\/users\/nelson-liu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nelson-liu\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-06T22:54:16Z","updated_at":"2021-04-19T10:10:34Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi!\r\n\r\nIn the docs for `cast`, it's noted that `For non-trivial conversion, e.g. string <-> ClassLabel you should use map() to update the Dataset.`\r\n\r\nWould it be possible to have an example that demonstrates such a string <-> ClassLabel conversion using `map`? 
Thanks!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2175\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2175","id":851836096,"node_id":"MDU6SXNzdWU4NTE4MzYwOTY=","number":2175,"title":"dataset.search_batch() function outputs all -1 indices sometime.","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-04-06T21:50:49Z","updated_at":"2021-04-16T12:21:16Z","closed_at":"2021-04-16T12:21:15Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am working with RAG and playing around with different faiss indexes. At the moment I use **index = faiss.index_factory(768, \"IVF65536_HNSW32,Flat\")**.\r\n\r\nDuring the retrieval phase exactly in [this line of retrieval_rag.py](https:\/\/github.com\/huggingface\/transformers\/blob\/master\/src\/transformers\/models\/rag\/retrieval_rag.py#L231) an error issue when all retrieved indices are -1. Please refer to the screenshot of a PID worker. \r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/16892570\/113782387-37a67600-9786-11eb-9c29-acad661a9648.png)\r\n\r\n\r\nHere, my retrieve batch size is 2 and n_docs is 5. I can solve this by working around np. stack, but I want to ask, why we get an output index of -1. 
Do you have any idea :) ?\r\n\r\nIs this a problem of the index, where the faiss can't find any similar vector?\r\nIs there documentation on the output index being -1?\r\n\r\n@lhoestq \r\n ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2174\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174","id":851383675,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5ODE2OTQ2","number":2174,"title":"Pin docutils for better doc","user":{"login":"sgugger","id":35901082,"node_id":"MDQ6VXNlcjM1OTAxMDgy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35901082?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sgugger","html_url":"https:\/\/github.com\/sgugger","followers_url":"https:\/\/api.github.com\/users\/sgugger\/followers","following_url":"https:\/\/api.github.com\/users\/sgugger\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sgugger\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sgugger\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sgugger\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sgugger\/orgs","repos_url":"https:\/\/api.github.com\/users\/sgugger\/repos","events_url":"https:\/\/api.github.com\/users\/sgugger\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sgugger\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-06T12:40:20Z","updated_at":"2021-04-06T12:55:53Z","closed_at":"2021-04-06T12:55:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2174","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2174.patch"},"body":"The latest release of docutils make the navbar in the documentation weird and the Markdown wrongly interpreted:\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/35901082\/113711773-5be55280-96b3-11eb-9b3b-9794f17709aa.png)\r\n\r\nWe had the same problem in Transformers and solved it by pinning docutils (a dep of sphinx).\r\n\r\nYou can see the version after the change [here](https:\/\/32769-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/index.html).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2173\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173","id":851359284,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5Nzk2NzI2","number":2173,"title":"Add OpenSLR 
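A minimal, self-contained sketch of the `search_batch` call at the centre of #2175, with random embeddings standing in for the real index (this needs `faiss` installed; the column name and dimensions are illustrative). FAISS marks a missing neighbour with index -1, for example when an IVF index returns fewer than `k` results.

```python
import numpy as np
from datasets import Dataset

# Build a toy dataset of random 768-d embeddings and index it with FAISS.
ds = Dataset.from_dict({"embeddings": np.random.rand(100, 768).astype("float32").tolist()})
ds.add_faiss_index(column="embeddings")

queries = np.random.rand(2, 768).astype("float32")
scores, indices = ds.search_batch("embeddings", queries, k=5)

# Any -1 entries in `indices` mean FAISS could not return k neighbours;
# they should be filtered out before being used to look up examples.
print(indices)
```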
dataset","user":{"login":"cahya-wirawan","id":7669893,"node_id":"MDQ6VXNlcjc2Njk4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7669893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cahya-wirawan","html_url":"https:\/\/github.com\/cahya-wirawan","followers_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/followers","following_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/orgs","repos_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/repos","events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cahya-wirawan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-06T12:08:34Z","updated_at":"2021-04-12T16:54:46Z","closed_at":"2021-04-12T16:54:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2173","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2173.patch"},"body":"OpenSLR (https:\/\/openslr.org\/) is a site devoted to hosting speech and language resources, such as training corpora for speech recognition, and software related to speech recognition. There are around 80 speech datasets listed in OpenSLR, currently this PR includes only 9 speech datasets SLR41, SLR42, SLR43, SLR44, SLR63, SLR64, SLR65, SLR66 and SLR69 (Javanese, Khmer, Nepali and Sundanese, Malayalam, Marathi, Tamil, Telugu and Catalan). 
I can add other speech datasets gradually next time.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2172\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172","id":851229399,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5Njg4ODgx","number":2172,"title":"Pin fsspec lower than 0.9.0","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-06T09:19:09Z","updated_at":"2021-04-06T09:49:27Z","closed_at":"2021-04-06T09:49:26Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2172","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2172.patch"},"body":"Today's release of `fsspec` 0.9.0 implied a new release of `s3fs` 0.6.0 but this version breaks the CI (see [here](https:\/\/app.circleci.com\/pipelines\/github\/huggingface\/datasets\/5312\/workflows\/490f3240-cd1c-4dd1-bb60-b416771c5584\/jobs\/32734) for example)\r\n\r\nI'm pinning `fsspec` until this has been resolved","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2171\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171","id":851090662,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5NTY4MDcw","number":2171,"title":"Fixed the link to wikiauto training 
data.","user":{"login":"mounicam","id":11708999,"node_id":"MDQ6VXNlcjExNzA4OTk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/11708999?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mounicam","html_url":"https:\/\/github.com\/mounicam","followers_url":"https:\/\/api.github.com\/users\/mounicam\/followers","following_url":"https:\/\/api.github.com\/users\/mounicam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mounicam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mounicam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mounicam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mounicam\/orgs","repos_url":"https:\/\/api.github.com\/users\/mounicam\/repos","events_url":"https:\/\/api.github.com\/users\/mounicam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mounicam\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-06T07:13:11Z","updated_at":"2021-04-06T16:05:42Z","closed_at":"2021-04-06T16:05:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2171","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2171.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2170\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2170","id":850913228,"node_id":"MDU6SXNzdWU4NTA5MTMyMjg=","number":2170,"title":"Wikipedia historic dumps are deleted but hf\/datasets hardcodes dump date","user":{"login":"leezu","id":946903,"node_id":"MDQ6VXNlcjk0NjkwMw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/946903?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/leezu","html_url":"https:\/\/github.com\/leezu","followers_url":"https:\/\/api.github.com\/users\/leezu\/followers","following_url":"https:\/\/api.github.com\/users\/leezu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/leezu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/leezu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/leezu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/leezu\/orgs","repos_url":"https:\/\/api.github.com\/users\/leezu\/repos","events_url":"https:\/\/api.github.com\/users\/leezu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/leezu\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-06T03:13:18Z","updated_at":"2021-06-16T01:10:50Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Wikimedia does not keep all historical dumps. 
For example, as of today https:\/\/dumps.wikimedia.org\/kowiki\/ only provides\r\n\r\n```\r\n20201220\/ 02-Feb-2021 01:36 -\r\n20210101\/ 21-Feb-2021 01:26 -\r\n20210120\/ 02-Mar-2021 01:25 -\r\n20210201\/ 21-Mar-2021 01:26 -\r\n20210220\/ 02-Apr-2021 01:26 -\r\n20210301\/ 03-Mar-2021 08:10 -\r\n20210320\/ 21-Mar-2021 18:13 -\r\n20210401\/ 03-Apr-2021 10:08 -\r\nlatest\/ 03-Apr-2021 10:08 -\r\n```\r\n\r\nHowever, the wikipedia dataset provided in the library, only supports the following configs, none of which are applicable anymore when disregarding the cached datasets:\r\n\r\n```\r\nValueError: BuilderConfig 20210401.ko not found. Available: ['20200501.aa', '20200501.ab', '20200501.ace', '20200501.ady', '20200501.af', '20200501.ak', '20200501.als', '20200501.am', '20200501.an', '20200501.ang', '20200501.ar', '20200501.arc', '20200501.arz', '20200501.as', '20200501.ast', '20200501.atj', '20200501.av', '20200501.ay', '20200501.az', '20200501.azb', '20200501.ba', '20200501.bar', '20200501.bat-smg', '20200501.bcl', '20200501.be', '20200501.be-x-old', '20200501.bg', '20200501.bh', '20200501.bi', '20200501.bjn', '20200501.bm', '20200501.bn', '20200501.bo', '20200501.bpy', '20200501.br', '20200501.bs', '20200501.bug', '20200501.bxr', '20200501.ca', '20200501.cbk-zam', '20200501.cdo', '20200501.ce', '20200501.ceb', '20200501.ch', '20200501.cho', '20200501.chr', '20200501.chy', '20200501.ckb', '20200501.co', '20200501.cr', '20200501.crh', '20200501.cs', '20200501.csb', '20200501.cu', '20200501.cv', '20200501.cy', '20200501.da', '20200501.de', '20200501.din', '20200501.diq', '20200501.dsb', '20200501.dty', '20200501.dv', '20200501.dz', '20200501.ee', '20200501.el', '20200501.eml', '20200501.en', '20200501.eo', '20200501.es', '20200501.et', '20200501.eu', '20200501.ext', '20200501.fa', '20200501.ff', '20200501.fi', '20200501.fiu-vro', '20200501.fj', '20200501.fo', '20200501.fr', '20200501.frp', '20200501.frr', '20200501.fur', '20200501.fy', '20200501.ga', '20200501.gag', '20200501.gan', '20200501.gd', '20200501.gl', '20200501.glk', '20200501.gn', '20200501.gom', '20200501.gor', '20200501.got', '20200501.gu', '20200501.gv', '20200501.ha', '20200501.hak', '20200501.haw', '20200501.he', '20200501.hi', '20200501.hif', '20200501.ho', '20200501.hr', '20200501.hsb', '20200501.ht', '20200501.hu', '20200501.hy', '20200501.ia', '20200501.id', '20200501.ie', '20200501.ig', '20200501.ii', '20200501.ik', '20200501.ilo', '20200501.inh', '20200501.io', '20200501.is', '20200501.it', '20200501.iu', '20200501.ja', '20200501.jam', '20200501.jbo', '20200501.jv', '20200501.ka', '20200501.kaa', '20200501.kab', '20200501.kbd', '20200501.kbp', '20200501.kg', '20200501.ki', '20200501.kj', '20200501.kk', '20200501.kl', '20200501.km', '20200501.kn', '20200501.ko', '20200501.koi', '20200501.krc', '20200501.ks', '20200501.ksh', '20200501.ku', '20200501.kv', '20200501.kw', '20200501.ky', '20200501.la', '20200501.lad', '20200501.lb', '20200501.lbe', '20200501.lez', '20200501.lfn', '20200501.lg', '20200501.li', '20200501.lij', '20200501.lmo', '20200501.ln', '20200501.lo', '20200501.lrc', '20200501.lt', '20200501.ltg', '20200501.lv', '20200501.mai', '20200501.map-bms', '20200501.mdf', '20200501.mg', '20200501.mh', '20200501.mhr', '20200501.mi', '20200501.min', '20200501.mk', '20200501.ml', '20200501.mn', '20200501.mr', '20200501.mrj', '20200501.ms', '20200501.mt', '20200501.mus', '20200501.mwl', '20200501.my', '20200501.myv', '20200501.mzn', '20200501.na', '20200501.nah', '20200501.nap', '20200501.nds', '20200501.nds-nl', 
'20200501.ne', '20200501.new', '20200501.ng', '20200501.nl', '20200501.nn', '20200501.no', '20200501.nov', '20200501.nrm', '20200501.nso', '20200501.nv', '20200501.ny', '20200501.oc', '20200501.olo', '20200501.om', '20200501.or', '20200501.os', '20200501.pa', '20200501.pag', '20200501.pam', '20200501.pap', '20200501.pcd', '20200501.pdc', '20200501.pfl', '20200501.pi', '20200501.pih', '20200501.pl', '20200501.pms', '20200501.pnb', '20200501.pnt', '20200501.ps', '20200501.pt', '20200501.qu', '20200501.rm', '20200501.rmy', '20200501.rn', '20200501.ro', '20200501.roa-rup', '20200501.roa-tara', '20200501.ru', '20200501.rue', '20200501.rw', '20200501.sa', '20200501.sah', '20200501.sat', '20200501.sc', '20200501.scn', '20200501.sco', '20200501.sd', '20200501.se', '20200501.sg', '20200501.sh', '20200501.si', '20200501.simple', '20200501.sk', '20200501.sl', '20200501.sm', '20200501.sn', '20200501.so', '20200501.sq', '20200501.sr', '20200501.srn', '20200501.ss', '20200501.st', '20200501.stq', '20200501.su', '20200501.sv', '20200501.sw', '20200501.szl', '20200501.ta', '20200501.tcy', '20200501.te', '20200501.tet', '20200501.tg', '20200501.th', '20200501.ti', '20200501.tk', '20200501.tl', '20200501.tn', '20200501.to', '20200501.tpi', '20200501.tr', '20200501.ts', '20200501.tt', '20200501.tum', '20200501.tw', '20200501.ty', '20200501.tyv', '20200501.udm', '20200501.ug', '20200501.uk', '20200501.ur', '20200501.uz', '20200501.ve', '20200501.vec', '20200501.vep', '20200501.vi', '20200501.vls', '20200501.vo', '20200501.wa', '20200501.war', '20200501.wo', '20200501.wuu', '20200501.xal', '20200501.xh', '20200501.xmf', '20200501.yi', '20200501.yo', '20200501.za', '20200501.zea', '20200501.zh', '20200501.zh-classical', '20200501.zh-min-nan', '20200501.zh-yue', '20200501.zu']\r\n```\r\n\r\nThe cached datasets:\r\n\r\n```\r\n% aws s3 --no-sign-request --endpoint-url https:\/\/storage.googleapis.com ls s3:\/\/huggingface-nlp\/cache\/datasets\/wikipedia\/\r\n PRE 20200501.de\/\r\n PRE 20200501.en\/\r\n PRE 20200501.fr\/\r\n PRE 20200501.frr\/\r\n PRE 20200501.it\/\r\n PRE 20200501.simple\/\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2169\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169","id":850456180,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA5MDI2ODUz","number":2169,"title":"Updated WER metric implementation to avoid memory 
issues","user":{"login":"diego-fustes","id":5707233,"node_id":"MDQ6VXNlcjU3MDcyMzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5707233?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/diego-fustes","html_url":"https:\/\/github.com\/diego-fustes","followers_url":"https:\/\/api.github.com\/users\/diego-fustes\/followers","following_url":"https:\/\/api.github.com\/users\/diego-fustes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/diego-fustes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/diego-fustes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/diego-fustes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/diego-fustes\/orgs","repos_url":"https:\/\/api.github.com\/users\/diego-fustes\/repos","events_url":"https:\/\/api.github.com\/users\/diego-fustes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/diego-fustes\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-05T15:43:20Z","updated_at":"2021-04-06T15:02:58Z","closed_at":"2021-04-06T15:02:58Z","author_association":"NONE","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2169","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2169.patch"},"body":"This is in order to fix this issue:\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/issues\/2078\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2168\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168","id":849957941,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA4NjA4Nzg5","number":2168,"title":"Preserve split type when realoding 
dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-04-04T20:46:21Z","updated_at":"2021-04-19T10:57:05Z","closed_at":"2021-04-19T09:08:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2168","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2168.patch"},"body":"Fixes #2167 \r\n\r\nUsing `eval` is not ideal for security reasons (in web apps I assume), but without it the code would be much more complex IMO.\r\n\r\nIn terms of style, instead of explicitly importing a private member (`_RelativeInstruction`), we can add these imports at the top of the module:\r\n```python\r\nfrom . import arrow_reader # gives us access to ReadInstruction and _RelativeInstruction\r\nfrom . 
import splits # gives us access to NamedSplit\r\n```\r\n\r\nand then define the `eval` globals as follows:\r\n```python\r\n{**arrow_reader.__dict__, **splits.__dict__}\r\n```\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2167\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2167","id":849944891,"node_id":"MDU6SXNzdWU4NDk5NDQ4OTE=","number":2167,"title":" Split type not preserved when reloading the dataset","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-04T19:29:54Z","updated_at":"2021-04-19T09:08:55Z","closed_at":"2021-04-19T09:08:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"A minimal reproducible example:\r\n```python\r\n>>> from datasets import load_dataset, Dataset\r\n>>> dset = load_dataset(\"sst\", split=\"train\")\r\n>>> dset.save_to_disk(\"sst\")\r\n>>> type(dset.split)\r\n\r\n>>> dset = Dataset.load_from_disk(\"sst\")\r\n>>> type(dset.split) # NamedSplit expected\r\n\r\n```\r\n\r\nIt seems like this bug was introduced in #2025.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2166\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2166","id":849778545,"node_id":"MDU6SXNzdWU4NDk3Nzg1NDU=","number":2166,"title":"Regarding Test Sets for the GEM 
datasets","user":{"login":"vyraun","id":17217068,"node_id":"MDQ6VXNlcjE3MjE3MDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17217068?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vyraun","html_url":"https:\/\/github.com\/vyraun","followers_url":"https:\/\/api.github.com\/users\/vyraun\/followers","following_url":"https:\/\/api.github.com\/users\/vyraun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vyraun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vyraun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vyraun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vyraun\/orgs","repos_url":"https:\/\/api.github.com\/users\/vyraun\/repos","events_url":"https:\/\/api.github.com\/users\/vyraun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vyraun\/received_events","type":"User","site_admin":false},"labels":[{"id":2067401494,"node_id":"MDU6TGFiZWwyMDY3NDAxNDk0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/Dataset%20discussion","name":"Dataset discussion","color":"72f99f","default":false,"description":"Discussions on the datasets"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-04T02:02:45Z","updated_at":"2021-04-06T08:13:12Z","closed_at":"2021-04-06T08:13:12Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"@yjernite Hi, are the test sets for the GEM datasets scheduled to be [added soon](https:\/\/gem-benchmark.com\/shared_task)? \r\n\r\ne.g.\r\n\r\n```\r\nfrom datasets import load_dataset\r\nDATASET_NAME=\"common_gen\"\r\ndata = load_dataset(\"gem\", DATASET_NAME)\r\n```\r\n\r\nThe test set doesn't have the target or references.\r\n\r\n```\r\ndata['test'][0]\r\n{'concept_set_id': 0, 'concepts': ['drill', 'field', 'run', 'team'], 'gem_id': 'common_gen-test-0', 'gem_parent_id': 'common_gen-test-0', 'references': [], 'target': ''}\r\n```\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2165\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2165","id":849771665,"node_id":"MDU6SXNzdWU4NDk3NzE2NjU=","number":2165,"title":"How to convert datasets.arrow_dataset.Dataset to 
torch.utils.data.Dataset","user":{"login":"y-rokutan","id":24562381,"node_id":"MDQ6VXNlcjI0NTYyMzgx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24562381?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/y-rokutan","html_url":"https:\/\/github.com\/y-rokutan","followers_url":"https:\/\/api.github.com\/users\/y-rokutan\/followers","following_url":"https:\/\/api.github.com\/users\/y-rokutan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/y-rokutan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/y-rokutan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/y-rokutan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/y-rokutan\/orgs","repos_url":"https:\/\/api.github.com\/users\/y-rokutan\/repos","events_url":"https:\/\/api.github.com\/users\/y-rokutan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/y-rokutan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-04-04T01:01:48Z","updated_at":"2021-05-02T11:01:06Z","closed_at":"2021-04-07T15:06:04Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm trying to pretraine deep-speed model using HF arxiv dataset like:\r\n```\r\ntrain_ds = nlp.load_dataset('scientific_papers', 'arxiv')\r\ntrain_ds.set_format(\r\n type=\"torch\",\r\n columns=[\"input_ids\", \"attention_mask\", \"global_attention_mask\", \"labels\"],\r\n )\r\nengine, _, _, _ = deepspeed.initialize(\r\n args=args,\r\n model=model,\r\n model_parameters=[p for p in model.parameters() if p.requires_grad],\r\n training_data=train_ds)\r\n```\r\nbut deepspeed.initialize accepts torch.utils.data.Dataset only. 
How can I convert HF-style dataset to torch-style dataset?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2164\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164","id":849739759,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA4NDQ0MTE3","number":2164,"title":"Replace assertTrue(isinstance with assertIsInstance in tests","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-04-03T21:07:02Z","updated_at":"2021-04-06T14:41:09Z","closed_at":"2021-04-06T14:41:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2164","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2164.patch"},"body":"Replaces all the occurrences of the `assertTrue(isinstance(` pattern with `assertIsInstance`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2163\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163","id":849669366,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA4Mzk0NDMz","number":2163,"title":"Concat only unique fields in 
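A minimal sketch of one possible answer to #2165: a `datasets.Dataset` already implements `__len__` and `__getitem__`, so a thin wrapper is enough when an API (such as `deepspeed.initialize`) insists on a genuine `torch.utils.data.Dataset` instance. The toy columns below are illustrative.

```python
import torch
from datasets import Dataset


class TorchDatasetWrapper(torch.utils.data.Dataset):
    """Expose an arrow-backed `datasets.Dataset` as a torch map-style dataset."""

    def __init__(self, hf_dataset):
        self.hf_dataset = hf_dataset

    def __len__(self):
        return len(self.hf_dataset)

    def __getitem__(self, idx):
        return self.hf_dataset[idx]


hf_ds = Dataset.from_dict({"input_ids": [[1, 2, 3], [4, 5, 6]], "labels": [0, 1]})
hf_ds.set_format(type="torch", columns=["input_ids", "labels"])

torch_ds = TorchDatasetWrapper(hf_ds)
print(isinstance(torch_ds, torch.utils.data.Dataset), torch_ds[0])
```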
DatasetInfo.from_merge","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-04-03T14:31:30Z","updated_at":"2021-04-06T14:40:00Z","closed_at":"2021-04-06T14:39:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2163","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2163.patch"},"body":"I thought someone from the community with less experience would be interested in fixing this issue, but that wasn't the case.\r\n\r\nFixes #2103 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2162\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2162","id":849129201,"node_id":"MDU6SXNzdWU4NDkxMjkyMDE=","number":2162,"title":"visualization for cc100 is broken 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-02T10:11:13Z","updated_at":"2021-04-07T13:01:07Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nvisualization through dataset viewer for cc100 is broken\r\nhttps:\/\/huggingface.co\/datasets\/viewer\/\r\n\r\nthanks a lot\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2161\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2161","id":849127041,"node_id":"MDU6SXNzdWU4NDkxMjcwNDE=","number":2161,"title":"any possibility to download part of large datasets 
only?","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-04-02T10:06:46Z","updated_at":"2021-07-02T15:19:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nSome of the datasets I need like cc100 are very large, and then I wonder if I can download first X samples of the shuffled\/unshuffled data without going through first downloading the whole data then sampling? thanks","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2160\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2160","id":849052921,"node_id":"MDU6SXNzdWU4NDkwNTI5MjE=","number":2160,"title":"data_args.preprocessing_num_workers almost freezes ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-04-02T07:56:13Z","updated_at":"2021-04-02T10:14:32Z","closed_at":"2021-04-02T10:14:31Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi @lhoestq \r\n\r\nI am running this code from huggingface transformers 
https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_mlm.py \r\n\r\nto speed up tokenization, since I am running on multiple datasets, I am using data_args.preprocessing_num_workers = 4 with opus100 corpus but this moves on till a point and then this freezes almost for sometime during tokenization steps and then this is back again, overall to me taking more time than normal case, I appreciate your advice on how I can use this option properly to speed up.\r\n\r\nthanks","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2159\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2159","id":848851962,"node_id":"MDU6SXNzdWU4NDg4NTE5NjI=","number":2159,"title":"adding ccnet dataset","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-01T23:28:36Z","updated_at":"2021-04-02T10:05:19Z","closed_at":"2021-04-02T10:05:19Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** ccnet\r\n\r\n- **Description:** \r\nCommon Crawl\r\n\r\n- **Paper:** \r\nhttps:\/\/arxiv.org\/abs\/1911.00359\r\n\r\n- **Data:** \r\nhttps:\/\/github.com\/facebookresearch\/cc_net\r\n\r\n- **Motivation:**\r\nthis is one of the most comprehensive clean monolingual datasets across a variety of languages. 
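A minimal sketch of the multiprocess tokenization that #2160 describes, reduced to the `datasets.map` call itself; the tokenizer checkpoint, batch size and toy corpus are illustrative rather than taken from the issue.

```python
from datasets import Dataset
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
ds = Dataset.from_dict({"text": ["a short example sentence"] * 1000})

# num_proc is the knob that data_args.preprocessing_num_workers ends up controlling.
tokenized = ds.map(
    lambda batch: tokenizer(batch["text"], truncation=True),
    batched=True,
    batch_size=250,
    num_proc=4,
)
print(tokenized.column_names)
```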
Quite important for cross-lingual reseach\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n\r\nthanks","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2158\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2158","id":848506746,"node_id":"MDU6SXNzdWU4NDg1MDY3NDY=","number":2158,"title":"viewer \"fake_news_english\" error","user":{"login":"emanuelevivoli","id":9447991,"node_id":"MDQ6VXNlcjk0NDc5OTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9447991?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/emanuelevivoli","html_url":"https:\/\/github.com\/emanuelevivoli","followers_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/followers","following_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/orgs","repos_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/repos","events_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/emanuelevivoli\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-04-01T14:13:20Z","updated_at":"2021-04-07T10:26:09Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"When I visit the [Huggingface - viewer](https:\/\/huggingface.co\/datasets\/viewer\/) web site, under the dataset \"fake_news_english\" I've got this error:\r\n\r\n> ImportError: To be able to use this dataset, you need to install the following dependencies['openpyxl'] using 'pip install # noqa: requires this pandas optional dependency for reading xlsx files' for instance'\r\n\r\nas well as the error Traceback.\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2157\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157","id":847205239,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA2MjM1NjUx","number":2157,"title":"updated user permissions based on 
umask","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-31T19:38:29Z","updated_at":"2021-04-06T07:19:19Z","closed_at":"2021-04-06T07:19:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2157","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2157.patch"},"body":"Updated user permissions based on running user's umask (#2065). Let me know if `0o666` is looking good or should I change it to `~umask` only (to give execute permissions as well) ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2156\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156","id":847198295,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA2MjI5MTky","number":2156,"title":"User 
permissions","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-31T19:33:48Z","updated_at":"2021-03-31T19:34:24Z","closed_at":"2021-03-31T19:34:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2156","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2156.patch"},"body":"Updated user permissions based on running user's umask. Let me know if `0o666` is looking good or should I change it to `~umask` only (to give execute permissions as well)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2155\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155","id":846786897,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA1ODU3MTU4","number":2155,"title":"Add table classes to the 
documentation","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-31T14:36:10Z","updated_at":"2021-04-01T16:46:30Z","closed_at":"2021-03-31T15:42:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2155","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2155.patch"},"body":"Following #2025 , I added the table classes to the documentation\r\n\r\ncc @albertvillanova ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2154\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154","id":846763960,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA1ODM2Mjc1","number":2154,"title":"Adding the NorNE dataset for Norwegian POS and 
NER","user":{"login":"versae","id":173537,"node_id":"MDQ6VXNlcjE3MzUzNw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/173537?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/versae","html_url":"https:\/\/github.com\/versae","followers_url":"https:\/\/api.github.com\/users\/versae\/followers","following_url":"https:\/\/api.github.com\/users\/versae\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/versae\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/versae\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/versae\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/versae\/orgs","repos_url":"https:\/\/api.github.com\/users\/versae\/repos","events_url":"https:\/\/api.github.com\/users\/versae\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/versae\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-31T14:22:50Z","updated_at":"2021-04-01T09:27:00Z","closed_at":"2021-04-01T09:16:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2154","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2154.patch"},"body":"NorNE is a manually annotated corpus of named entities which extends the annotation of the existing Norwegian Dependency Treebank. Comprising both of the official standards of written Norwegian (Bokm\u00e5l and Nynorsk), the corpus contains around 600,000 tokens and annotates a rich set of entity types including persons, organizations, locations, geo-political entities, products, and events, in addition to a class corresponding to nominals derived from names.\r\n\r\nSee #1720.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2153\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2153","id":846181502,"node_id":"MDU6SXNzdWU4NDYxODE1MDI=","number":2153,"title":"load_dataset ignoring 
features","user":{"login":"GuillemGSubies","id":37592763,"node_id":"MDQ6VXNlcjM3NTkyNzYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/37592763?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GuillemGSubies","html_url":"https:\/\/github.com\/GuillemGSubies","followers_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/followers","following_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/orgs","repos_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/repos","events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GuillemGSubies\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-03-31T08:30:09Z","updated_at":"2021-08-11T11:39:18Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"First of all, I'm sorry if it is a repeated issue or the changes are already in master, I searched 
and I didn't find anything. \r\n\r\nI'm using datasets 1.5.0\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/37592763\/113114369-8f376580-920b-11eb-900d-94365b59f04b.png)\r\n\r\nAs you can see, when I load the dataset, the ClassLabels are ignored, I have to cast the dataset in order to make it work.\r\n\r\nCode to reproduce:\r\n\r\n```python\r\nimport datasets\r\ndata_location = \"\/data\/prueba_multiclase\"\r\nfeatures = datasets.Features(\r\n {\"texto\": datasets.Value(\"string\"), \"label\": datasets.features.ClassLabel(names=[\"false\", \"true\"])}\r\n )\r\ndataset = datasets.load_dataset(\r\n \"csv\", data_files=data_location, delimiter=\"\\t\", features=features\r\n )\r\n```\r\n\r\nDataset I used:\r\n\r\n\r\n[prueba_multiclase.zip](https:\/\/github.com\/huggingface\/datasets\/files\/6235022\/prueba_multiclase.zip) (it has to be unzipped)\r\n\r\n\r\nThank you! \u2764\ufe0f \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2152\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152","id":845751273,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA0ODk0MDkz","number":2152,"title":"Update README.md","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-31T03:21:19Z","updated_at":"2021-04-01T10:20:37Z","closed_at":"2021-04-01T10:20:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2152","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2152.patch"},"body":"Updated some descriptions of Wino_Bias dataset.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2151\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151","id":844886081,"node_id":"MDExOlB1bGxSZXF1ZXN0NjA0MDg5MDMw","number":2151,"title":"Add support for axis in concatenate datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/1","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/1\/labels","id":6644198,"node_id":"MDk6TWlsZXN0b25lNjY0NDE5OA==","number":1,"title":"1.6","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":4,"state":"closed","created_at":"2021-04-09T13:07:51Z","updated_at":"2021-04-20T16:50:46Z","due_on":"2021-04-16T07:00:00Z","closed_at":"2021-04-20T16:50:46Z"},"comments":5,"created_at":"2021-03-30T16:58:44Z","updated_at":"2021-06-23T17:41:02Z","closed_at":"2021-04-19T16:07:18Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2151","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2151.patch"},"body":"Add support for `axis` (0 or 1) in `concatenate_datasets`.\r\n\r\nClose #853.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2150\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150","id":844776448,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzOTg3OTcx","number":2150,"title":"Allow pickling of big in-memory 
tables","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-30T15:51:56Z","updated_at":"2021-03-31T10:37:15Z","closed_at":"2021-03-31T10:37:14Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2150","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2150.patch"},"body":"This should fix issue #2134 \r\n\r\nPickling is limited to <4GiB objects, it's not possible to pickle a big arrow table (for multiprocessing for example).\r\nFor big tables, we have to write them on disk and only pickle the path to the table.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2149\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2149","id":844734076,"node_id":"MDU6SXNzdWU4NDQ3MzQwNzY=","number":2149,"title":"Telugu subset missing for xtreme tatoeba 
dataset","user":{"login":"jerryIsHere","id":50871412,"node_id":"MDQ6VXNlcjUwODcxNDEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/50871412?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jerryIsHere","html_url":"https:\/\/github.com\/jerryIsHere","followers_url":"https:\/\/api.github.com\/users\/jerryIsHere\/followers","following_url":"https:\/\/api.github.com\/users\/jerryIsHere\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jerryIsHere\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jerryIsHere\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jerryIsHere\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jerryIsHere\/orgs","repos_url":"https:\/\/api.github.com\/users\/jerryIsHere\/repos","events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jerryIsHere\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-30T15:26:34Z","updated_at":"2021-04-07T10:23:35Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"from nlp import load_dataset\r\ntrain_dataset = load_dataset('xtreme', 'tatoeba.tel')['validation']\r\nValueError: BuilderConfig tatoeba.tel not found.\r\n\r\nbut language tel is actually included in xtreme:\r\nhttps:\/\/github.com\/google-research\/xtreme\/blob\/master\/utils_preprocess.py\r\ndef tatoeba_preprocess(args):\r\n lang3_dict = {\r\n 'afr':'af', 'ara':'ar', 'bul':'bg', 'ben':'bn',\r\n 'deu':'de', 'ell':'el', 'spa':'es', 'est':'et',\r\n 'eus':'eu', 'pes':'fa', 'fin':'fi', 'fra':'fr',\r\n 'heb':'he', 'hin':'hi', 'hun':'hu', 'ind':'id',\r\n 'ita':'it', 'jpn':'ja', 'jav':'jv', 'kat':'ka',\r\n 'kaz':'kk', 'kor':'ko', 'mal':'ml', 'mar':'mr',\r\n 'nld':'nl', 'por':'pt', 'rus':'ru', 'swh':'sw',\r\n 'tam':'ta', **_'tel':'te'_**, 'tha':'th', 'tgl':'tl', <----here\r\n 'tur':'tr', 'urd':'ur', 'vie':'vi', 'cmn':'zh',\r\n 'eng':'en',\r\n }","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2148\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2148","id":844700910,"node_id":"MDU6SXNzdWU4NDQ3MDA5MTA=","number":2148,"title":"Add configurable options to `seqeval` 
metric","user":{"login":"marrodion","id":44571847,"node_id":"MDQ6VXNlcjQ0NTcxODQ3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44571847?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/marrodion","html_url":"https:\/\/github.com\/marrodion","followers_url":"https:\/\/api.github.com\/users\/marrodion\/followers","following_url":"https:\/\/api.github.com\/users\/marrodion\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/marrodion\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/marrodion\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/marrodion\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/marrodion\/orgs","repos_url":"https:\/\/api.github.com\/users\/marrodion\/repos","events_url":"https:\/\/api.github.com\/users\/marrodion\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/marrodion\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-30T15:04:06Z","updated_at":"2021-04-15T13:49:46Z","closed_at":"2021-04-15T13:49:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Right now `load_metric(\"seqeval\")` only works in the default mode of evaluation (equivalent to conll evaluation).\r\n\r\nHowever, seqeval library [supports](https:\/\/github.com\/chakki-works\/seqeval#support-features) different evaluation schemes (IOB1, IOB2, etc.), which can be plugged in just by supporting additional kwargs in `Seqeval._compute`\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/85cf7ff920c90ca2e12bedca12b36d2a043c3da2\/metrics\/seqeval\/seqeval.py#L109\r\n\r\nThings that would be relevant are, for example, supporting `mode=\"strict\", scheme=IOB2` to count only full entity match as a true positive and omit partial matches.\r\n\r\nThe only problem I see is that the spirit of `metrics` seems to not require additional imports from user. `seqeval` only supports schemes as objects, without any string aliases. \r\n\r\nIt can be solved naively with mapping like `{\"IOB2\": seqeval.scheme.IOB2}`. 
Or just left as is and require user to explicitly import scheme from `seqeval` if he wants to configure it past the default implementation.\r\n\r\nIf that makes sense, I am happy to implement the change.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2147\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147","id":844687831,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzOTA3NjM4","number":2147,"title":"Render docstring return type as inline","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-30T14:55:43Z","updated_at":"2021-03-31T13:11:05Z","closed_at":"2021-03-31T13:11:05Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2147","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2147.patch"},"body":"This documentation setting will avoid having the return type in a separate line under `Return type`. \r\n\r\nSee e.g. 
current docs for `Dataset.to_csv`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2146\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2146","id":844673244,"node_id":"MDU6SXNzdWU4NDQ2NzMyNDQ=","number":2146,"title":"Dataset file size on disk is very large with 3D Array","user":{"login":"jblemoine","id":22685854,"node_id":"MDQ6VXNlcjIyNjg1ODU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22685854?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jblemoine","html_url":"https:\/\/github.com\/jblemoine","followers_url":"https:\/\/api.github.com\/users\/jblemoine\/followers","following_url":"https:\/\/api.github.com\/users\/jblemoine\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jblemoine\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jblemoine\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jblemoine\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jblemoine\/orgs","repos_url":"https:\/\/api.github.com\/users\/jblemoine\/repos","events_url":"https:\/\/api.github.com\/users\/jblemoine\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jblemoine\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-30T14:46:09Z","updated_at":"2021-04-16T13:07:02Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, \r\n\r\nI have created my own dataset using the provided dataset loading script. It is an image dataset where images are stored as 3D Array with dtype=uint8. \r\n\r\nThe actual size on disk is surprisingly large. It takes 520 MB. Here is some info from `dataset_info.json`. \r\n\r\n`{\r\n \"description\": \"\",\r\n \"citation\": \"\",\r\n \"homepage\": \"\",\r\n \"license\": \"\",\r\n \"features\": {\r\n \"image\": {\r\n \"shape\": [224, 224, 3],\r\n \"dtype\": \"uint8\",\r\n \"id\": null,\r\n \"_type\": \"Array3D\",\r\n }\r\n },\r\n \"post_processed\": null,\r\n \"supervised_keys\": null,\r\n \"builder_name\": \"shot_type_image_dataset\",\r\n \"config_name\": \"default\",\r\n \"version\": {\r\n \"version_str\": \"0.0.0\",\r\n \"description\": null,\r\n \"major\": 0,\r\n \"minor\": 0,\r\n \"patch\": 0,\r\n },\r\n \"splits\": {\r\n \"train\": {\r\n \"name\": \"train\",\r\n \"num_bytes\": 520803408,\r\n \"num_examples\": 1479,\r\n \"dataset_name\": \"shot_type_image_dataset\",\r\n }\r\n },\r\n \"download_checksums\": {\r\n \"\": {\r\n \"num_bytes\": 16940447118,\r\n \"checksum\": \"5854035705efe08b0ed8f3cf3da7b4d29cba9055c2d2d702c79785350d72ee03\",\r\n }\r\n },\r\n \"download_size\": 16940447118,\r\n \"post_processing_size\": null,\r\n \"dataset_size\": 520803408,\r\n \"size_in_bytes\": 17461250526,\r\n}`\r\n\r\nI have created the same dataset with tensorflow_dataset and it takes only 125MB on disk.\r\n\r\nI am wondering, is it normal behavior ? 
I understand `Datasets` uses Arrow for serialization wheres tf uses TF Records.\r\n\r\nThis might be a problem for large dataset. \r\n\r\nThanks for your help. \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2145\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145","id":844603518,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzODMxOTE2","number":2145,"title":"Implement Dataset add_column","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/3","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3\/labels","id":6644287,"node_id":"MDk6TWlsZXN0b25lNjY0NDI4Nw==","number":3,"title":"1.7","description":"Next minor 
release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":3,"state":"closed","created_at":"2021-04-09T13:16:31Z","updated_at":"2021-05-31T16:20:53Z","due_on":"2021-05-14T07:00:00Z","closed_at":"2021-05-31T16:20:53Z"},"comments":1,"created_at":"2021-03-30T14:02:14Z","updated_at":"2021-04-29T14:50:44Z","closed_at":"2021-04-29T14:50:43Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2145","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2145.patch"},"body":"Implement `Dataset.add_column`.\r\n\r\nClose #1954.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2144\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2144","id":844352067,"node_id":"MDU6SXNzdWU4NDQzNTIwNjc=","number":2144,"title":"Loading wikipedia 20200501.en throws pyarrow related 
error","user":{"login":"TomPyonsuke","id":26637405,"node_id":"MDQ6VXNlcjI2NjM3NDA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26637405?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/TomPyonsuke","html_url":"https:\/\/github.com\/TomPyonsuke","followers_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/followers","following_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/orgs","repos_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/repos","events_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/TomPyonsuke\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-30T10:38:31Z","updated_at":"2021-04-01T09:21:17Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"**Problem description**\r\nI am getting the following error when trying to load wikipedia\/20200501.en dataset.\r\n\r\n**Error log**\r\nDownloading and preparing dataset wikipedia\/20200501.en (download: 16.99 GiB, generated: 17.07 GiB, post-processed: Unknown size, total: 34.06 GiB) to \/usr\/local\/workspace\/NAS_NLP\/cache\/wikipedia\/20200501.en\/1.0.0\/50aa706aa417bb77d910ad61211cc672c0ef3e0f224225a5e0a18277ade8b931...\r\nDownloading: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 14.6k\/14.6k [00:00<00:00, 5.41MB\/s]\r\nDownloading: 59%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 10.7G\/18.3G [11:30<08:08, 15.5MB\/s]\r\nDataset wikipedia downloaded and prepared to \/usr\/local\/workspace\/NAS_NLP\/cache\/wikipedia\/20200501.en\/1.0.0\/50aa706aa417bb77d910ad61211cc672c0ef3e0f224225a5e0a18277ade8b931. 
Subsequent calls will reuse this data.\r\nTraceback (most recent call last):\r\n File \"load_wiki.py\", line 2, in \r\n ds = load_dataset('wikipedia', '20200501.en', cache_dir='\/usr\/local\/workspace\/NAS_NLP\/cache')\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/load.py\", line 751, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py\", line 746, in as_dataset\r\n map_tuple=True,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/py_utils.py\", line 204, in map_nested\r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/py_utils.py\", line 204, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/utils\/py_utils.py\", line 142, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py\", line 763, in _build_single_dataset\r\n in_memory=in_memory,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/builder.py\", line 835, in _as_dataset\r\n in_memory=in_memory,\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 215, in read\r\n return self.read_files(files=files, original_instructions=instructions, in_memory=in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 236, in read_files\r\n pa_table = self._read_files(files, in_memory=in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 171, in _read_files\r\n pa_table: pa.Table = self._get_dataset_from_filename(f_dict, in_memory=in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 302, in _get_dataset_from_filename\r\n pa_table = ArrowReader.read_table(filename, in_memory=in_memory)\r\n File \"\/usr\/local\/lib\/python3.6\/dist-packages\/datasets\/arrow_reader.py\", line 324, in read_table\r\n pa_table = f.read_all()\r\n File \"pyarrow\/ipc.pxi\", line 544, in pyarrow.lib.RecordBatchReader.read_all\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\nOSError: Expected to be able to read 9176784 bytes for message body, got 4918712\r\n\r\n**Detailed version info**\r\ndatasets==1.5.0\r\n - dataclasses [required: Any, installed: 0.8]\r\n - dill [required: Any, installed: 0.3.3]\r\n - fsspec [required: Any, installed: 0.8.7]\r\n - importlib-metadata [required: Any, installed: 1.7.0]\r\n - zipp [required: >=0.5, installed: 3.1.0]\r\n - huggingface-hub [required: <0.1.0, installed: 0.0.7]\r\n - filelock [required: Any, installed: 3.0.12]\r\n - importlib-metadata [required: Any, installed: 1.7.0]\r\n - zipp [required: >=0.5, installed: 3.1.0]\r\n - requests [required: Any, installed: 2.24.0]\r\n - certifi [required: >=2017.4.17, installed: 2020.6.20]\r\n - chardet [required: >=3.0.2,<4, installed: 3.0.4]\r\n - idna [required: >=2.5,<3, installed: 2.6]\r\n - urllib3 [required: >=1.21.1,<1.26,!=1.25.1,!=1.25.0, installed: 1.25.10]\r\n - tqdm [required: Any, installed: 4.49.0]\r\n - importlib-metadata [required: Any, installed: 1.7.0]\r\n - zipp [required: >=0.5, installed: 3.1.0]\r\n - multiprocess [required: Any, installed: 
0.70.11.1]\r\n - dill [required: >=0.3.3, installed: 0.3.3]\r\n - numpy [required: >=1.17, installed: 1.17.0]\r\n - pandas [required: Any, installed: 1.1.5]\r\n - numpy [required: >=1.15.4, installed: 1.17.0]\r\n - python-dateutil [required: >=2.7.3, installed: 2.8.0]\r\n - six [required: >=1.5, installed: 1.15.0]\r\n - pytz [required: >=2017.2, installed: 2020.1]\r\n - pyarrow [required: >=0.17.1, installed: 3.0.0]\r\n - numpy [required: >=1.16.6, installed: 1.17.0]\r\n - requests [required: >=2.19.0, installed: 2.24.0]\r\n - certifi [required: >=2017.4.17, installed: 2020.6.20]\r\n - chardet [required: >=3.0.2,<4, installed: 3.0.4]\r\n - idna [required: >=2.5,<3, installed: 2.6]\r\n - urllib3 [required: >=1.21.1,<1.26,!=1.25.1,!=1.25.0, installed: 1.25.10]\r\n - tqdm [required: >=4.27,<4.50.0, installed: 4.49.0]\r\n - xxhash [required: Any, installed: 2.0.0]\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2143\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143","id":844313228,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzNTc0NjI0","number":2143,"title":"task casting via load_dataset","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3
OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-03-30T10:00:42Z","updated_at":"2021-06-11T13:20:41Z","closed_at":"2021-06-11T13:20:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2143","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2143.patch"},"body":"wip\r\nnot satisfied with the API, it means as a dataset implementer I need to write a function with boilerplate and write classes for each `` \"facet\".","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2142\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142","id":843919420,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzMjQwMzUy","number":2142,"title":"Gem 
V1.1","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T23:47:02Z","updated_at":"2021-03-30T00:10:02Z","closed_at":"2021-03-30T00:10:02Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2142","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2142.patch"},"body":"This branch updates the GEM benchmark to its 1.1 version which includes:\r\n- challenge sets for most tasks\r\n- detokenized TurkCorpus to match the rest of the text simplification subtasks\r\n- fixed inputs for TurkCorpus and ASSET test sets\r\n- 18 languages in WikiLingua\r\n\r\ncc @sebastianGehrmann","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2141\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141","id":843914790,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzMjM2MjUw","number":2141,"title":"added spans field for the wikiann 
datasets","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-29T23:38:26Z","updated_at":"2021-03-31T13:27:50Z","closed_at":"2021-03-31T13:27:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2141","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2141.patch"},"body":"Hi @lhoestq \r\nI tried to add spans to the wikiann datasets.\r\nThanks a lot for kindly having a look.\r\nThis addresses https:\/\/github.com\/huggingface\/datasets\/issues\/2130. \r\nBest regards\r\nRabeeh ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2140\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140","id":843830451,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAzMTYxMjYx","number":2140,"title":"add banking77 
dataset","user":{"login":"dkajtoch","id":32985207,"node_id":"MDQ6VXNlcjMyOTg1MjA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32985207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dkajtoch","html_url":"https:\/\/github.com\/dkajtoch","followers_url":"https:\/\/api.github.com\/users\/dkajtoch\/followers","following_url":"https:\/\/api.github.com\/users\/dkajtoch\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dkajtoch\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dkajtoch\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dkajtoch\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dkajtoch\/orgs","repos_url":"https:\/\/api.github.com\/users\/dkajtoch\/repos","events_url":"https:\/\/api.github.com\/users\/dkajtoch\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dkajtoch\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-29T21:32:23Z","updated_at":"2021-04-09T09:32:18Z","closed_at":"2021-04-09T09:32:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2140","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2140.patch"},"body":"Intent classification\/detection dataset from banking category with 77 unique intents.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2139\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2139","id":843662613,"node_id":"MDU6SXNzdWU4NDM2NjI2MTM=","number":2139,"title":"TypeError when using save_to_disk in a dataset loaded with ReadInstruction 
split","user":{"login":"PedroMLF","id":22480495,"node_id":"MDQ6VXNlcjIyNDgwNDk1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22480495?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PedroMLF","html_url":"https:\/\/github.com\/PedroMLF","followers_url":"https:\/\/api.github.com\/users\/PedroMLF\/followers","following_url":"https:\/\/api.github.com\/users\/PedroMLF\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PedroMLF\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PedroMLF\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PedroMLF\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PedroMLF\/orgs","repos_url":"https:\/\/api.github.com\/users\/PedroMLF\/repos","events_url":"https:\/\/api.github.com\/users\/PedroMLF\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PedroMLF\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-29T18:23:54Z","updated_at":"2021-03-30T09:12:53Z","closed_at":"2021-03-30T09:12:53Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nLoading a dataset with `load_dataset` using a split defined via `ReadInstruction` and then saving it to disk results in the following error: `TypeError: Object of type ReadInstruction is not JSON serializable`.\r\n\r\nHere is the minimal reproducible example:\r\n\r\n```python\r\nfrom datasets import load_dataset\r\nfrom datasets import ReadInstruction\r\n\r\ndata_1 = load_dataset(\r\n \"wikiann\",\r\n \"en\",\r\n split=\"validation\",\r\n)\r\n\r\ndata_1.save_to_disk(\"temporary_path_1\")\r\n\r\nprint(\"Save with regular split works.\")\r\n\r\ndata_2 = load_dataset(\r\n \"wikiann\",\r\n \"en\",\r\n split=ReadInstruction(\"validation\", to=50, unit=\"%\"),\r\n)\r\n\r\ndata_2.save_to_disk(\"temporary_path_2\")\r\n```\r\n\r\nand the corresponding output:\r\n\r\n```\r\nReusing dataset wikiann (\/xxxxx\/.cache\/huggingface\/datasets\/wikiann\/en\/1.1.0\/0b11a6fb31eea02f38ca17610657bfba3206100685283014daceb8da291c3be9)\r\nSave with regular split works.\r\nReusing dataset wikiann (\/xxxxx\/.cache\/huggingface\/datasets\/wikiann\/en\/1.1.0\/0b11a6fb31eea02f38ca17610657bfba3206100685283014daceb8da291c3be9)\r\nTraceback (most recent call last):\r\n File \"bug.py\", line 20, in \r\n data_2.save_to_disk(\"temporary_path_2\")\r\n File \"\/xxxxx\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 645, in save_to_disk\r\n json.dump(state, state_file, indent=2, sort_keys=True)\r\n File \"\/usr\/lib\/python3.7\/json\/__init__.py\", line 179, in dump\r\n for chunk in iterable:\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 431, in _iterencode\r\n yield from _iterencode_dict(o, _current_indent_level)\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 405, in _iterencode_dict\r\n yield from chunks\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 438, in _iterencode\r\n o = _default(o)\r\n File \"\/usr\/lib\/python3.7\/json\/encoder.py\", line 179, in default\r\n raise TypeError(f'Object of type {o.__class__.__name__} '\r\nTypeError: Object of type ReadInstruction is not JSON serializable\r\n```\r\n\r\nLet me know if there is some misuse from my end.\r\n\r\nThanks in advance.\r\n ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2138\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138","id":843508402,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyODc4NzU2","number":2138,"title":"Add CER metric","user":{"login":"chutaklee","id":6931004,"node_id":"MDQ6VXNlcjY5MzEwMDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6931004?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/chutaklee","html_url":"https:\/\/github.com\/chutaklee","followers_url":"https:\/\/api.github.com\/users\/chutaklee\/followers","following_url":"https:\/\/api.github.com\/users\/chutaklee\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/chutaklee\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/chutaklee\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/chutaklee\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/chutaklee\/orgs","repos_url":"https:\/\/api.github.com\/users\/chutaklee\/repos","events_url":"https:\/\/api.github.com\/users\/chutaklee\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/chutaklee\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T15:52:27Z","updated_at":"2021-04-06T16:16:11Z","closed_at":"2021-04-06T07:14:38Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2138","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2138.patch"},"body":"Add Character Error Rate (CER) metric that is used in evaluation in ASR. 
I also have written unittests (hopefully thorough enough) but I'm not sure how to integrate them into the existed codebase.\r\n\r\n```python\r\nfrom cer import CER\r\n\r\ncer = CER()\r\n\r\nclass TestCER(unittest.TestCase):\r\n def test_cer_case_senstive(self):\r\n refs = ['White House']\r\n preds = ['white house']\r\n # S = 2, D = 0, I = 0, N = 11, CER = 2 \/ 11\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.1818181818) < 1e-6)\r\n\r\n def test_cer_whitespace(self):\r\n refs = ['were wolf']\r\n preds = ['werewolf']\r\n # S = 0, D = 0, I = 1, N = 9, CER = 1 \/ 9\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.1111111) < 1e-6)\r\n\r\n refs = ['werewolf']\r\n preds = ['weae wolf']\r\n # S = 1, D = 1, I = 0, N = 8, CER = 0.25\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.25) < 1e-6)\r\n\r\n # consecutive whitespaces case 1\r\n refs = ['were wolf']\r\n preds = ['were wolf']\r\n # S = 0, D = 0, I = 0, N = 9, CER = 0\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.0) < 1e-6)\r\n\r\n # consecutive whitespaces case 2\r\n refs = ['were wolf']\r\n preds = ['were wolf']\r\n # S = 0, D = 0, I = 0, N = 9, CER = 0\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.0) < 1e-6)\r\n\r\n def test_cer_sub(self):\r\n refs = ['werewolf']\r\n preds = ['weaewolf']\r\n # S = 1, D = 0, I = 0, N = 8, CER = 0.125\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.125) < 1e-6)\r\n\r\n def test_cer_del(self):\r\n refs = ['werewolf']\r\n preds = ['wereawolf']\r\n # S = 0, D = 1, I = 0, N = 8, CER = 0.125\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.125) < 1e-6)\r\n\r\n def test_cer_insert(self):\r\n refs = ['werewolf']\r\n preds = ['wereolf']\r\n # S = 0, D = 0, I = 1, N = 8, CER = 0.125\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.125) < 1e-6)\r\n\r\n def test_cer_equal(self):\r\n refs = ['werewolf']\r\n char_error_rate = cer.compute(predictions=refs, references=refs)\r\n self.assertEqual(char_error_rate, 0.0)\r\n\r\n def test_cer_list_of_seqs(self):\r\n refs = ['werewolf', 'I am your father']\r\n char_error_rate = cer.compute(predictions=refs, references=refs)\r\n self.assertEqual(char_error_rate, 0.0)\r\n\r\n refs = ['werewolf', 'I am your father', 'doge']\r\n preds = ['werxwolf', 'I am your father', 'doge']\r\n # S = 1, D = 0, I = 0, N = 28, CER = 1 \/ 28\r\n char_error_rate = cer.compute(predictions=preds, references=refs)\r\n self.assertTrue(abs(char_error_rate - 0.03571428) < 1e-6)\r\n\r\n def test_cer_unicode(self):\r\n ref = [u'\u6211\u80fd\u541e\u4e0b\u73bb\u7483\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n pred = [u' \u80fd\u541e\u867e\u73bb\u7483\u800c \u4e0d\u971c\u8eab\u4f53\u5566']\r\n # S = 3, D = 2, I = 0, N = 11\r\n # CER = 5 \/ 11\r\n char_error_rate = cer.compute(predictions=pred, references=ref)\r\n self.assertTrue(abs(char_error_rate - 0.4545454545) < 1e-6)\r\n\r\n ref = [u'\u6211\u80fd\u541e', u'\u4e0b\u73bb\u7483\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n pred = [u'\u6211 \u80fd \u541e \u4e0b \u73bb \u7483', u'\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n # S = 0, D = 5, I = 0, N = 11\r\n # CER = 5 
\/ 11\r\n char_error_rate = cer.compute(predictions=pred, references=ref)\r\n self.assertTrue(abs(char_error_rate - 0.454545454545) < 1e-6)\r\n\r\n ref = [u'\u6211\u80fd\u541e\u4e0b\u73bb\u7483\u800c\u4e0d\u4f24\u8eab\u4f53']\r\n char_error_rate = cer.compute(predictions=ref, references=ref)\r\n self.assertFalse(char_error_rate, 0.0)\r\n\r\n def test_cer_empty(self):\r\n ref = ''\r\n pred = 'Hypothesis'\r\n with self.assertRaises(ValueError):\r\n char_error_rate = cer.compute(predictions=pred, references=ref)\r\n\r\nif __name__ == '__main__':\r\n unittest.main()\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2137\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137","id":843502835,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyODc0MDYw","number":2137,"title":"Fix missing infos from concurrent dataset loading","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T15:46:12Z","updated_at":"2021-03-31T10:35:56Z","closed_at":"2021-03-31T10:35:55Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2137","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2137.patch"},"body":"This should fix issue #2131 \r\n\r\nWhen calling `load_dataset` at the same time from 2 workers, one of the worker could have missing split infos when reloading the dataset from the cache.\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2136\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136","id":843492015,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyODY0ODY5","number":2136,"title":"fix dialogue action slot name and value","user":{"login":"adamlin120","id":31605305,"node_id":"MDQ6VXNlcjMxNjA1MzA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31605305?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adamlin120","html_url":"https:\/\/github.com\/adamlin120","followers_url":"https:\/\/api.github.com\/users\/adamlin120\/followers","following_url":"https:\/\/api.github.com\/users\/adamlin120\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adamlin120\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adamlin120\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adamlin120\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adamlin120\/orgs","repos_url":"https:\/\/api.github.com\/users\/adamlin120\/repos","events_url":"https:\/\/api.github.com\/users\/adamlin120\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adamlin120\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T15:34:13Z","updated_at":"2021-03-31T12:48:02Z","closed_at":"2021-03-31T12:48:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2136","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2136.patch"},"body":"fix #2128","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2135\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2135","id":843246344,"node_id":"MDU6SXNzdWU4NDMyNDYzNDQ=","number":2135,"title":"en language data from MLQA dataset is 
missing","user":{"login":"rabeehk","id":6278280,"node_id":"MDQ6VXNlcjYyNzgyODA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6278280?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rabeehk","html_url":"https:\/\/github.com\/rabeehk","followers_url":"https:\/\/api.github.com\/users\/rabeehk\/followers","following_url":"https:\/\/api.github.com\/users\/rabeehk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rabeehk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rabeehk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rabeehk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rabeehk\/orgs","repos_url":"https:\/\/api.github.com\/users\/rabeehk\/repos","events_url":"https:\/\/api.github.com\/users\/rabeehk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rabeehk\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-29T10:47:50Z","updated_at":"2021-03-30T10:20:23Z","closed_at":"2021-03-30T10:20:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI need mlqa-translate-train.en dataset, but it is missing from the MLQA dataset. could you have a look please? @lhoestq thank you for your help to fix this issue. ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2134\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2134","id":843242849,"node_id":"MDU6SXNzdWU4NDMyNDI4NDk=","number":2134,"title":"Saving large in-memory datasets with save_to_disk crashes because of pickling","user":{"login":"prokopCerny","id":5815801,"node_id":"MDQ6VXNlcjU4MTU4MDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5815801?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/prokopCerny","html_url":"https:\/\/github.com\/prokopCerny","followers_url":"https:\/\/api.github.com\/users\/prokopCerny\/followers","following_url":"https:\/\/api.github.com\/users\/prokopCerny\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/prokopCerny\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/prokopCerny\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/prokopCerny\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/prokopCerny\/orgs","repos_url":"https:\/\/api.github.com\/users\/prokopCerny\/repos","events_url":"https:\/\/api.github.com\/users\/prokopCerny\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/prokopCerny\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-03-29T10:43:15Z","updated_at":"2021-05-03T17:59:21Z","closed_at":"2021-05-03T17:59:21Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Using Datasets 1.5.0 on Python 3.7.\r\nRecently I've been working on medium to large size datasets (pretokenized raw text sizes from few gigabytes to low tens of gigabytes), and have found out that several preprocessing steps are massively faster when done in memory, and I have the ability to requisition a lot of RAM, so I decided to do these steps completely out of the datasets library.\r\n\r\n So my workflow is to do several .map() on datasets object, then for the operation which is faster in memory to extract the necessary columns from the dataset and then drop it whole, do the transformation in memory, and then create a fresh Dataset object using .from_dict() or other method. 
\r\n\r\nWhen I then try to call save_to_disk(path) on the dataset, it crashes because of pickling, which appears to be because of using old pickle protocol which doesn't support large files (over 4 GiB).\r\n```\r\nTraceback (most recent call last):\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 80, in \r\n main()\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 75, in main\r\n tokenize_and_chunkify(config)\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 60, in tokenize_and_chunkify\r\n contexts_dataset.save_to_disk(chunked_path)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 457, in save_to_disk\r\n self = pickle.loads(pickle.dumps(self))\r\nOverflowError: cannot serialize a bytes object larger than 4 GiB\r\n```\r\nFrom what I've seen this issue may be possibly fixed, as the line `self = pickle.loads(pickle.dumps(self))` does not appear to be present in the current state of the repository.\r\n\r\nTo save these datasets to disk, I've resorted to calling .map() over them with `function=None` and specifying the .arrow cache file, and then creating a new dataset using the .from_file() method, which I can then safely save to disk.\r\n\r\nAdditional issue when working with these large in-memory datasets is when using multiprocessing, is again to do with pickling. I've tried to speed up the mapping with function=None by specifying num_proc to the available cpu count, and I again get issues with transferring the dataset, with the following traceback. I am not sure if I should open a separate issue for that.\r\n```\r\nTraceback (most recent call last):\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 94, in \r\n main()\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 89, in main\r\n tokenize_and_chunkify(config)\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 67, in tokenize_and_chunkify\r\n contexts_dataset.map(function=None, cache_file_name=str(output_dir_path \/ \"tmp.arrow\"), writer_batch_size=50000, num_proc=config.threads)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in map\r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in \r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 657, in get\r\n raise self._value\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 431, in _handle_tasks\r\n put(task)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/connection.py\", line 209, in send\r\n self._send_bytes(_ForkingPickler.dumps(obj))\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/reduction.py\", line 54, in dumps\r\n cls(buf, protocol, *args, **kwds).dump(obj)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 454, in dump\r\n StockPickler.dump(self, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 437, in dump\r\n self.save(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call 
unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 662, in save_reduce\r\n save(state)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File 
\"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File 
\"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 732, in save_bytes\r\n self._write_large_bytes(BINBYTES + pack(\"\r\n main()\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 89, in main\r\n tokenize_and_chunkify(config)\r\n File \".\/tokenize_and_chunkify_in_memory.py\", line 67, in tokenize_and_chunkify\r\n contexts_dataset.map(function=None, cache_file_name=str(output_dir_path \/ \"tmp.arrow\"), writer_batch_size=50000, num_proc=config.threads)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in map\r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1485, in \r\n transformed_shards = [r.get() for r in results]\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 657, in get\r\n raise self._value\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/pool.py\", line 431, in _handle_tasks\r\n put(task)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/connection.py\", line 209, in send\r\n self._send_bytes(_ForkingPickler.dumps(obj))\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/multiprocess\/reduction.py\", line 54, in dumps\r\n cls(buf, protocol, *args, **kwds).dump(obj)\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 454, in dump\r\n StockPickler.dump(self, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 437, in dump\r\n self.save(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 662, in save_reduce\r\n save(state)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File 
\"\/home\/cernypro\/dev\/envs\/huggingface_gpu\/lib\/python3.7\/site-packages\/dill\/_dill.py\", line 941, in save_module_dict\r\n StockPickler.save_dict(pickler, obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 859, in save_dict\r\n self._batch_setitems(obj.items())\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 885, in _batch_setitems\r\n save(v)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit 
self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 846, in _batch_appends\r\n save(tmp[0])\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 789, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 819, in save_list\r\n self._batch_appends(obj)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 843, in _batch_appends\r\n save(x)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 549, in save\r\n self.save_reduce(obj=obj, *rv)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 638, in save_reduce\r\n save(args)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 774, in save_tuple\r\n save(element)\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 504, in save\r\n f(self, obj) # Call unbound method with explicit self\r\n File \"\/mnt\/appl\/software\/Python\/3.7.4-GCCcore-8.3.0\/lib\/python3.7\/pickle.py\", line 732, in save_bytes\r\n self._write_large_bytes(BINBYTES + pack(\"\r\n73 | \u00a0 | main()\r\n74 | \u00a0 | File \"run_gpt.py\", line 222, in main\r\n75 | \u00a0 | delimiter=\"\\t\", column_names=[\"input_ids\", \"attention_mask\", \"chinese_ref\"])\r\n76 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 747, in load_dataset\r\n77 | \u00a0 | use_auth_token=use_auth_token,\r\n78 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 513, in download_and_prepare\r\n79 | \u00a0 | self.download_post_processing_resources(dl_manager)\r\n80 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 673, in download_post_processing_resources\r\n81 | \u00a0 | for split in self.info.splits:\r\n82 | \u00a0 | TypeError: 'NoneType' object is not iterable\r\n83 | \u00a0 | WARNING:datasets.builder:Reusing 
dataset csv (\/usr\/local\/app\/.cache\/huggingface\/datasets\/csv\/default-1c257ebd48e225e7\/0.0.0\/2960f95a26e85d40ca41a230ac88787f715ee3003edaacb8b1f0891e9f04dda2)\r\n84 | \u00a0 | Traceback (most recent call last):\r\n85 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/runpy.py\", line 193, in _run_module_as_main\r\n86 | \u00a0 | \"__main__\", mod_spec)\r\n87 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/runpy.py\", line 85, in _run_code\r\n88 | \u00a0 | exec(code, run_globals)\r\n89 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/torch\/distributed\/launch.py\", line 340, in \r\n90 | \u00a0 | main()\r\n91 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/torch\/distributed\/launch.py\", line 326, in main\r\n92 | \u00a0 | sigkill_handler(signal.SIGTERM, None) # not coming back\r\n93 | \u00a0 | File \"\/data\/miniconda3\/lib\/python3.7\/site-packages\/torch\/distributed\/launch.py\", line 301, in sigkill_handler\r\n94 | \u00a0 | raise subprocess.CalledProcessError(returncode=last_return_code, cmd=cmd)\r\n\r\n```\r\nOn worker 1 it loads the dataset well, however on worker 2 will get this error. \r\nAnd I will meet this error from time to time, sometimes it just goes well.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2130\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2130","id":843111936,"node_id":"MDU6SXNzdWU4NDMxMTE5MzY=","number":2130,"title":"wikiann dataset is missing columns ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-29T08:23:00Z","updated_at":"2021-07-31T06:16:16Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nWikiann dataset needs to have \"spans\" columns, which is necessary to be able to use this dataset, but this column is missing 
from huggingface datasets, could you please have a look? thank you @lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2129\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2129","id":843033656,"node_id":"MDU6SXNzdWU4NDMwMzM2NTY=","number":2129,"title":"How to train BERT model with next sentence prediction?","user":{"login":"jnishi","id":836541,"node_id":"MDQ6VXNlcjgzNjU0MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/836541?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/jnishi","html_url":"https:\/\/github.com\/jnishi","followers_url":"https:\/\/api.github.com\/users\/jnishi\/followers","following_url":"https:\/\/api.github.com\/users\/jnishi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/jnishi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/jnishi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/jnishi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/jnishi\/orgs","repos_url":"https:\/\/api.github.com\/users\/jnishi\/repos","events_url":"https:\/\/api.github.com\/users\/jnishi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/jnishi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-29T06:48:03Z","updated_at":"2021-04-01T04:58:40Z","closed_at":"2021-04-01T04:58:40Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello.\r\n\r\nI'm trying to pretrain the BERT model with next sentence prediction. 
Is there any function that supports next sentence prediction \r\nlike ` TextDatasetForNextSentencePrediction` of `huggingface\/transformers` ?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2128\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2128","id":843023910,"node_id":"MDU6SXNzdWU4NDMwMjM5MTA=","number":2128,"title":"Dialogue action slot name and value are reversed in MultiWoZ 2.2","user":{"login":"adamlin120","id":31605305,"node_id":"MDQ6VXNlcjMxNjA1MzA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/31605305?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adamlin120","html_url":"https:\/\/github.com\/adamlin120","followers_url":"https:\/\/api.github.com\/users\/adamlin120\/followers","following_url":"https:\/\/api.github.com\/users\/adamlin120\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adamlin120\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adamlin120\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adamlin120\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adamlin120\/orgs","repos_url":"https:\/\/api.github.com\/users\/adamlin120\/repos","events_url":"https:\/\/api.github.com\/users\/adamlin120\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adamlin120\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-29T06:34:02Z","updated_at":"2021-03-31T12:48:01Z","closed_at":"2021-03-31T12:48:01Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi @yjernite, thank you for adding MultiWoZ 2.2 in the huggingface datasets platform. 
It is beneficial!\r\n\r\nI spot an error that the order of Dialogue action slot names and values are reversed.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/649b2c469779bc4221e1b6969aa2496d63eb5953\/datasets\/multi_woz_v22\/multi_woz_v22.py#L251-L262","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2127\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127","id":843017199,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyNDYxMzc3","number":2127,"title":"make documentation more clear to use different cloud storage","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-29T06:24:06Z","updated_at":"2021-03-29T12:16:24Z","closed_at":"2021-03-29T12:16:24Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2127","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2127.patch"},"body":"This PR extends the cloud storage documentation. To show you can use a different `fsspec` implementation. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2126\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126","id":842779966,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAyMjcyMjg4","number":2126,"title":"Replace legacy torch.Tensor constructor with torch.tensor","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-28T16:57:30Z","updated_at":"2021-03-29T09:27:14Z","closed_at":"2021-03-29T09:27:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2126","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2126.patch"},"body":"The title says it all (motivated by [this issue](https:\/\/github.com\/pytorch\/pytorch\/issues\/53146) in the pytorch repo).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2125\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2125","id":842690570,"node_id":"MDU6SXNzdWU4NDI2OTA1NzA=","number":2125,"title":"Is dataset timit_asr 
broken?","user":{"login":"kosuke-kitahara","id":42398050,"node_id":"MDQ6VXNlcjQyMzk4MDUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42398050?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kosuke-kitahara","html_url":"https:\/\/github.com\/kosuke-kitahara","followers_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/followers","following_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/orgs","repos_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/repos","events_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kosuke-kitahara\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-28T08:30:18Z","updated_at":"2021-03-28T12:29:25Z","closed_at":"2021-03-28T12:29:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Using `timit_asr` dataset, I saw all records are the same.\r\n\r\n``` python\r\nfrom datasets import load_dataset, load_metric\r\n\r\ntimit = load_dataset(\"timit_asr\")\r\n\r\nfrom datasets import ClassLabel\r\nimport random\r\nimport pandas as pd\r\nfrom IPython.display import display, HTML\r\n\r\ndef show_random_elements(dataset, num_examples=10):\r\n assert num_examples <= len(dataset), \"Can't pick more elements than there are in the dataset.\"\r\n picks = []\r\n for _ in range(num_examples):\r\n pick = random.randint(0, len(dataset)-1)\r\n while pick in picks:\r\n pick = random.randint(0, len(dataset)-1)\r\n picks.append(pick)\r\n\r\n df = pd.DataFrame(dataset[picks])\r\n display(HTML(df.to_html()))\r\n\r\n\r\nshow_random_elements(timit['train'].remove_columns([\"file\", \"phonetic_detail\", \"word_detail\", \"dialect_region\", \"id\", \r\n \"sentence_type\", \"speaker_id\"]), num_examples=20)\r\n\r\n```\r\n\r\n`output`\r\n\r\n\"Screen\r\n\r\n\r\nI double-checked it [here](https:\/\/huggingface.co\/datasets\/viewer\/), and met the same problem.\r\n\r\n\"Screen\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2124\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2124","id":842627729,"node_id":"MDU6SXNzdWU4NDI2Mjc3Mjk=","number":2124,"title":"Adding ScaNN library to do 
MIPS?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-28T00:07:00Z","updated_at":"2021-03-29T13:23:43Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"@lhoestq Hi I am thinking of adding this new google library to do the MIPS similar to **add_faiss_idex**. As the paper suggests, it is really fast when it comes to retrieving the nearest neighbors. \r\n\r\nhttps:\/\/github.com\/google-research\/google-research\/tree\/master\/scann\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/16892570\/112738294-78ec9800-8fc6-11eb-9a5f-3d7ee5818e76.png)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2123\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2123","id":842577285,"node_id":"MDU6SXNzdWU4NDI1NzcyODU=","number":2123,"title":"Problem downloading GEM wiki_auto_asset_turk 
dataset","user":{"login":"mille-s","id":29705940,"node_id":"MDQ6VXNlcjI5NzA1OTQw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29705940?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mille-s","html_url":"https:\/\/github.com\/mille-s","followers_url":"https:\/\/api.github.com\/users\/mille-s\/followers","following_url":"https:\/\/api.github.com\/users\/mille-s\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mille-s\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mille-s\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mille-s\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mille-s\/orgs","repos_url":"https:\/\/api.github.com\/users\/mille-s\/repos","events_url":"https:\/\/api.github.com\/users\/mille-s\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mille-s\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-27T18:41:28Z","updated_at":"2021-05-12T16:15:18Z","closed_at":"2021-05-12T16:15:17Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"@yjernite \r\n\r\n### Summary\r\n\r\nI am currently working on the GEM datasets and do not manage to download the wiki_auto_asset_turk data, whereas all other datasets download well with the same code.\r\n\r\n### Steps to reproduce\r\nCode snippet:\r\n\r\nfrom datasets import load_dataset\r\n#dataset = load_dataset('gem', 'web_nlg_en')\r\ndataset = load_dataset('gem', 'wiki_auto_asset_turk')\r\n\r\n```\r\n\r\n**Expected behavior:**\r\n\r\nI expect the dataset to start downloading (download bar appears and progresses toward 100%)\r\n\r\n**Actual behavior:**\r\nInstead of seeing the download bar appearing, nothing happens; the following appears in the console as expected, but nothing more:\r\n\r\nDownloading: 36.6kB [00:00, 37.2MB\/s]\r\nDownloading: 41.7kB [00:00, ?B\/s]\r\nDownloading and preparing dataset gem\/wiki_auto_asset_turk (download: 121.37 MiB, generated: 145.69 MiB, post-processed: Unknown size, total: 267.07 MiB) to C:\\Users\\sfmil\\.cache\\huggingface\\datasets\\gem\\wiki_auto_asset_turk\\1.0.0\\f252756d7f1b8f019aac71a1623b2950acfe10d25d956668ac4eae4e93c58b8d...\r\n\r\n### Is this a regression?\r\nNo, it was the first time I was trying to download this dataset (same for the other ones).\r\n\r\n### Debug info\r\n- Python version: Python 3.8.2\r\n- OS version: Windows 10 Family","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2122\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122","id":842194588,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxODE3MjI0","number":2122,"title":"Fast table queries with interpolation 
search","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-26T18:09:20Z","updated_at":"2021-08-04T18:11:59Z","closed_at":"2021-04-06T14:33:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2122","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2122.patch"},"body":"## Intro\r\n\r\nThis should fix issue #1803 \r\n\r\nCurrently querying examples in a dataset is O(n) because of the underlying pyarrow ChunkedArrays implementation.\r\nTo fix this I implemented interpolation search that is pretty effective since datasets usually verifies the condition of evenly distributed chunks (the default chunk size is fixed).\r\n\r\n## Benchmark\r\n\r\nHere is a [benchmark](https:\/\/pastebin.com\/utEXUqsR) I did on bookcorpus (74M rows):\r\n\r\nfor the current implementation\r\n```python\r\n>>> python speed.py\r\nLoaded dataset 'bookcorpus', len=74004228, nbytes=4835358766\r\n\r\n\r\n========================= Querying unshuffled bookcorpus =========================\r\n\r\nAvg access time key=1 : 0.018ms\r\nAvg access time key=74004227 : 0.215ms\r\nAvg access time key=range(74003204, 74004228) : 1.416ms\r\nAvg access time key=RandIter(low=0, high=74004228, size=1024, seed=42): 92.532ms\r\n\r\n========================== Querying shuffled bookcorpus ==========================\r\n\r\nAvg access time key=1 : 0.187ms\r\nAvg access time key=74004227 : 6.642ms\r\nAvg access time key=range(74003204, 74004228) : 90.941ms\r\nAvg access time key=RandIter(low=0, high=74004228, size=1024, seed=42): 3448.456ms\r\n```\r\n\r\nfor the new one using interpolation search:\r\n```python\r\n>>> python speed.py\r\nLoaded dataset 'bookcorpus', len=74004228, nbytes=4835358766\r\n\r\n\r\n========================= Querying unshuffled bookcorpus =========================\r\n\r\nAvg access time key=1 : 0.076ms\r\nAvg access time key=74004227 : 0.056ms\r\nAvg access time key=range(74003204, 74004228) : 1.807ms\r\nAvg access time key=RandIter(low=0, high=74004228, size=1024, seed=42): 24.028ms\r\n\r\n========================== Querying shuffled bookcorpus ==========================\r\n\r\nAvg access time key=1 : 0.061ms\r\nAvg access time key=74004227 : 0.058ms\r\nAvg access time key=range(74003204, 74004228) : 22.166ms\r\nAvg access time key=RandIter(low=0, 
high=74004228, size=1024, seed=42): 42.757ms\r\n```\r\n\r\nThe RandIter class is just an iterable of 1024 random indices from 0 to 74004228.\r\n\r\nHere is also a plot showing the speed improvement depending on the dataset size:\r\n![image](https:\/\/user-images.githubusercontent.com\/42851186\/112673587-32335c80-8e65-11eb-9a0c-58ad774abaec.png)\r\n\r\n## Implementation details:\r\n- `datasets.table.Table` objects implement interpolation search for the `slice` method\r\n- The interpolation search requires to store the offsets of all the chunks of a table. The offsets are stored when the `Table` is initialized.\r\n- `datasets.table.Table.slice` returns a `datasets.table.Table` using interpolation search\r\n- `datasets.table.Table.fast_slice` returns a `pyarrow.Table` object using interpolation search. This is useful to get a part of a dataset if we don't need the indexing structure for future computations. For example it's used when querying an example as a dictionary.\r\n- Now a `Dataset` object is always backed by a `datasets.table.Table` object. If one passes a `pyarrow.Table` to initialize a `Dataset`, then it's converted to a `datasets.table.Table`\r\n\r\n## Checklist:\r\n\r\n- [x] implement interpolation search\r\n- [x] use `datasets.table.Table` in `Dataset` objects\r\n- [x] update current tests\r\n- [x] add tests for interpolation search\r\n- [x] comments and docstring\r\n- [x] add the benchmark to the CI\r\n\r\nFix #1803.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2121\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121","id":842148633,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxNzc4NDc4","number":2121,"title":"Add Validation For 
README","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-26T17:02:17Z","updated_at":"2021-05-10T13:17:18Z","closed_at":"2021-05-10T09:41:41Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2121","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2121.patch"},"body":"Hi @lhoestq, @yjernite \r\n\r\nThis is a simple Readme parser. All classes specific to different sections can inherit `Section` class, and we can define more attributes in each.\r\n\r\nLet me know if this is going in the right direction :)\r\n\r\nCurrently the output looks like this, for `to_dict()` on `FashionMNIST` `README.md`:\r\n\r\n```json\r\n{\r\n \"name\": \".\/datasets\/fashion_mnist\/README.md\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Dataset Card for FashionMNIST\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Table of Contents\",\r\n \"attributes\": \"- [Dataset Description](#dataset-description)\\n - [Dataset Summary](#dataset-summary)\\n - [Supported Tasks](#supported-tasks-and-leaderboards)\\n - [Languages](#languages)\\n- [Dataset Structure](#dataset-structure)\\n - [Data Instances](#data-instances)\\n - [Data Fields](#data-instances)\\n - [Data Splits](#data-instances)\\n- [Dataset Creation](#dataset-creation)\\n - [Curation Rationale](#curation-rationale)\\n - [Source Data](#source-data)\\n - [Annotations](#annotations)\\n - [Personal and Sensitive Information](#personal-and-sensitive-information)\\n- [Considerations for Using the Data](#considerations-for-using-the-data)\\n - [Social Impact of Dataset](#social-impact-of-dataset)\\n - [Discussion of Biases](#discussion-of-biases)\\n - [Other Known Limitations](#other-known-limitations)\\n- [Additional Information](#additional-information)\\n - [Dataset Curators](#dataset-curators)\\n - [Licensing Information](#licensing-information)\\n - [Citation Information](#citation-information)\\n - [Contributions](#contributions)\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Dataset Description\",\r\n \"attributes\": \"- **Homepage:** [GitHub](https:\/\/github.com\/zalandoresearch\/fashion-mnist)\\n- **Repository:** [GitHub](https:\/\/github.com\/zalandoresearch\/fashion-mnist)\\n- **Paper:** 
[arXiv](https:\/\/arxiv.org\/pdf\/1708.07747.pdf)\\n- **Leaderboard:**\\n- **Point of Contact:**\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Dataset Summary\",\r\n \"attributes\": \"Fashion-MNIST is a dataset of Zalando's article images\\u2014consisting of a training set of 60,000 examples and a test set of 10,000 examples. Each example is a 28x28 grayscale image, associated with a label from 10 classes. We intend Fashion-MNIST to serve as a direct drop-in replacement for the original MNIST dataset for benchmarking machine learning algorithms. It shares the same image size and structure of training and testing splits.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Supported Tasks and Leaderboards\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Languages\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Dataset Structure\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Data Instances\",\r\n \"attributes\": \"A data point comprises an image and its label.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Data Fields\",\r\n \"attributes\": \"- `image`: a 2d array of integers representing the 28x28 image.\\n- `label`: an integer between 0 and 9 representing the classes with the following mapping:\\n | Label | Description |\\n | --- | --- |\\n | 0 | T-shirt\/top |\\n | 1 | Trouser |\\n | 2 | Pullover |\\n | 3 | Dress |\\n | 4 | Coat |\\n | 5 | Sandal |\\n | 6 | Shirt |\\n | 7 | Sneaker |\\n | 8 | Bag |\\n | 9 | Ankle boot |\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Data Splits\",\r\n \"attributes\": \"The data is split into training and test set. The training set contains 60,000 images and the test set 10,000 images.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Dataset Creation\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Curation Rationale\",\r\n \"attributes\": \"**From the arXiv paper:**\\nThe original MNIST dataset contains a lot of handwritten digits. Members of the AI\/ML\/Data Science community love this dataset and use it as a benchmark to validate their algorithms. In fact, MNIST is often the first dataset researchers try. \\\"If it doesn't work on MNIST, it won't work at all\\\", they said. \\\"Well, if it does work on MNIST, it may still fail on others.\\\"\\nHere are some good reasons:\\n- MNIST is too easy. Convolutional nets can achieve 99.7% on MNIST. Classic machine learning algorithms can also achieve 97% easily. Check out our side-by-side benchmark for Fashion-MNIST vs. MNIST, and read \\\"Most pairs of MNIST digits can be distinguished pretty well by just one pixel.\\\"\\n- MNIST is overused. In this April 2017 Twitter thread, Google Brain research scientist and deep learning expert Ian Goodfellow calls for people to move away from MNIST.\\n- MNIST can not represent modern CV tasks, as noted in this April 2017 Twitter thread, deep learning expert\/Keras author Fran\\u00e7ois Chollet.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Source Data\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Initial Data Collection and Normalization\",\r\n \"attributes\": \"**From the arXiv paper:**\\nFashion-MNIST is based on the assortment on Zalando\\u2019s website. Every fashion product on Zalando has a set of pictures shot by professional photographers, demonstrating different aspects of the product, i.e. 
front and back looks, details, looks with model and in an outfit. The original picture has a light-gray background (hexadecimal color: #fdfdfd) and stored in 762 \\u00d7 1000 JPEG format. For efficiently serving different frontend components, the original picture is resampled with multiple resolutions, e.g. large, medium, small, thumbnail and tiny.\\nWe use the front look thumbnail images of 70,000 unique products to build Fashion-MNIST. Those products come from different gender groups: men, women, kids and neutral. In particular, whitecolor products are not included in the dataset as they have low contrast to the background. The thumbnails (51 \\u00d7 73) are then fed into the following conversion pipeline:\\n1. Converting the input to a PNG image.\\n2. Trimming any edges that are close to the color of the corner pixels. The \\u201ccloseness\\u201d is defined by the distance within 5% of the maximum possible intensity in RGB space.\\n3. Resizing the longest edge of the image to 28 by subsampling the pixels, i.e. some rows and columns are skipped over.\\n4. Sharpening pixels using a Gaussian operator of the radius and standard deviation of 1.0, with increasing effect near outlines.\\n5. Extending the shortest edge to 28 and put the image to the center of the canvas.\\n6. Negating the intensities of the image.\\n7. Converting the image to 8-bit grayscale pixels.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Who are the source image producers?\",\r\n \"attributes\": \"**From the arXiv paper:**\\nEvery fashion product on Zalando has a set of pictures shot by professional photographers, demonstrating different aspects of the product, i.e. front and back looks, details, looks with model and in an outfit.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Annotations\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Annotation process\",\r\n \"attributes\": \"**From the arXiv paper:**\\nFor the class labels, they use the silhouette code of the product. The silhouette code is manually labeled by the in-house fashion experts and reviewed by a separate team at Zalando. Each product Zalando is the Europe\\u2019s largest online fashion platform. 
Each product contains only one silhouette code.\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Who are the annotators?\",\r\n \"attributes\": \"**From the arXiv paper:**\\nThe silhouette code is manually labeled by the in-house fashion experts and reviewed by a separate team at Zalando.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Personal and Sensitive Information\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Considerations for Using the Data\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Social Impact of Dataset\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Discussion of Biases\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Other Known Limitations\",\r\n \"attributes\": \"[More Information Needed]\",\r\n \"subsections\": []\r\n }\r\n ]\r\n },\r\n {\r\n \"name\": \"Additional Information\",\r\n \"attributes\": \"\",\r\n \"subsections\": [\r\n {\r\n \"name\": \"Dataset Curators\",\r\n \"attributes\": \"Han Xiao and Kashif Rasul and Roland Vollgraf\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Licensing Information\",\r\n \"attributes\": \"MIT Licence\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Citation Information\",\r\n \"attributes\": \"@article{DBLP:journals\/corr\/abs-1708-07747,\\n author = {Han Xiao and\\n Kashif Rasul and\\n Roland Vollgraf},\\n title = {Fashion-MNIST: a Novel Image Dataset for Benchmarking Machine Learning\\n Algorithms},\\n journal = {CoRR},\\n volume = {abs\/1708.07747},\\n year = {2017},\\n url = {http:\/\/arxiv.org\/abs\/1708.07747},\\n archivePrefix = {arXiv},\\n eprint = {1708.07747},\\n timestamp = {Mon, 13 Aug 2018 16:47:27 +0200},\\n biburl = {https:\/\/dblp.org\/rec\/bib\/journals\/corr\/abs-1708-07747},\\n bibsource = {dblp computer science bibliography, https:\/\/dblp.org}\\n}\",\r\n \"subsections\": []\r\n },\r\n {\r\n \"name\": \"Contributions\",\r\n \"attributes\": \"Thanks to [@gchhablani](https:\/\/github.com\/gchablani) for adding this dataset.\",\r\n \"subsections\": []\r\n }\r\n ]\r\n }\r\n ]\r\n }\r\n ]\r\n}\r\n```\r\n\r\nThanks,\r\nGunjan","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2120\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2120","id":841954521,"node_id":"MDU6SXNzdWU4NDE5NTQ1MjE=","number":2120,"title":"dataset viewer does not work anymore 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-26T13:22:13Z","updated_at":"2021-03-26T15:52:22Z","closed_at":"2021-03-26T15:52:22Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI normally use this link to see all datasets and how I can load them \r\n\r\n\r\nhttps:\/\/huggingface.co\/datasets\/viewer\/\r\n\r\nNow I am getting \r\n\r\n502 Bad Gateway\r\nnginx\/1.18.0 (Ubuntu)\r\n\r\ncould you bring this webpage back ? this was very helpful @lhoestq \r\nthanks for your help ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2119\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119","id":841567199,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxMjg2MjIy","number":2119,"title":"copy.deepcopy os.environ instead of 
copy","user":{"login":"NihalHarish","id":5506053,"node_id":"MDQ6VXNlcjU1MDYwNTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5506053?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/NihalHarish","html_url":"https:\/\/github.com\/NihalHarish","followers_url":"https:\/\/api.github.com\/users\/NihalHarish\/followers","following_url":"https:\/\/api.github.com\/users\/NihalHarish\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/NihalHarish\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/NihalHarish\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/NihalHarish\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/NihalHarish\/orgs","repos_url":"https:\/\/api.github.com\/users\/NihalHarish\/repos","events_url":"https:\/\/api.github.com\/users\/NihalHarish\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/NihalHarish\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-26T03:58:38Z","updated_at":"2021-03-26T15:13:52Z","closed_at":"2021-03-26T15:13:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2119","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2119.patch"},"body":"Fixes: https:\/\/github.com\/huggingface\/datasets\/issues\/2115\r\n\r\n- bug fix: using envrion.copy() returns a dict.\r\n- using deepcopy(environ) returns an `_environ` object\r\n- Changing the datatype of the _environ object can break code, if subsequent libraries perform operations using apis exclusive to the environ object, like `environ.getenv()` for example.\r\n\r\n\r\nTesting:\r\n\r\nTested the change on my terminal:\r\n\r\n```\r\n>>> import os\r\n>>> x = deepcopy(os.environ)\r\n>>> y = os.environ\r\n>>> x is y\r\nFalse\r\n>>> isinstance(x, type(os.environ))\r\nTrue\r\n>>> z = os.environ.copy()\r\n>>> isinstance(z, type(os.environ))\r\nFalse\r\n>>> isinstance(z, dict)\r\nTrue\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2118\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118","id":841563329,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAxMjgzMDUx","number":2118,"title":"Remove os.environ.copy in 
Dataset.map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-26T03:48:17Z","updated_at":"2021-03-26T12:03:23Z","closed_at":"2021-03-26T12:00:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2118","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2118.patch"},"body":"Replace `os.environ.copy` with in-place modification\r\nFixes #2115 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2117\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2117","id":841535283,"node_id":"MDU6SXNzdWU4NDE1MzUyODM=","number":2117,"title":"load_metric from local \"glue.py\" meet error 'NoneType' object is not 
callable","user":{"login":"Frankie123421","id":54012361,"node_id":"MDQ6VXNlcjU0MDEyMzYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/54012361?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Frankie123421","html_url":"https:\/\/github.com\/Frankie123421","followers_url":"https:\/\/api.github.com\/users\/Frankie123421\/followers","following_url":"https:\/\/api.github.com\/users\/Frankie123421\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Frankie123421\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Frankie123421\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Frankie123421\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Frankie123421\/orgs","repos_url":"https:\/\/api.github.com\/users\/Frankie123421\/repos","events_url":"https:\/\/api.github.com\/users\/Frankie123421\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Frankie123421\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-26T02:35:22Z","updated_at":"2021-06-08T17:17:01Z","closed_at":"2021-03-26T02:40:26Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"actual_task = \"mnli\" if task == \"mnli-mm\" else task\r\ndataset = load_dataset(path='\/home\/glue.py', name=actual_task)\r\nmetric = load_metric(path='\/home\/glue.py', name=actual_task)\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in \r\n 1 actual_task = \"mnli\" if task == \"mnli-mm\" else task\r\n 2 dataset = load_dataset(path='\/home\/jcli\/glue.py', name=actual_task)\r\n----> 3 metric = load_metric(path='\/home\/jcli\/glue.py', name=actual_task)\r\n\r\n~\/anaconda3\/envs\/pytorch\/lib\/python3.6\/site-packages\/datasets\/load.py in load_metric(path, config_name, process_id, num_process, cache_dir, experiment_id, keep_in_memory, download_config, download_mode, script_version, **metric_init_kwargs)\r\n 508 keep_in_memory=keep_in_memory,\r\n 509 experiment_id=experiment_id,\r\n--> 510 **metric_init_kwargs,\r\n 511 )\r\n 512 \r\n\r\nTypeError: 'NoneType' object is not callable\r\n\r\nPlease help","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2116\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2116","id":841481292,"node_id":"MDU6SXNzdWU4NDE0ODEyOTI=","number":2116,"title":"Creating custom dataset results in error while calling the map() 
function","user":{"login":"GeetDsa","id":13940397,"node_id":"MDQ6VXNlcjEzOTQwMzk3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13940397?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/GeetDsa","html_url":"https:\/\/github.com\/GeetDsa","followers_url":"https:\/\/api.github.com\/users\/GeetDsa\/followers","following_url":"https:\/\/api.github.com\/users\/GeetDsa\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/GeetDsa\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/GeetDsa\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/GeetDsa\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/GeetDsa\/orgs","repos_url":"https:\/\/api.github.com\/users\/GeetDsa\/repos","events_url":"https:\/\/api.github.com\/users\/GeetDsa\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/GeetDsa\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-26T00:37:46Z","updated_at":"2021-03-31T14:30:32Z","closed_at":"2021-03-31T14:30:32Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"calling `map()` of `datasets` library results into an error while defining a Custom dataset.\r\nReproducible example:\r\n```\r\nimport datasets\r\nclass MyDataset(datasets.Dataset):\r\n\r\n def __init__(self, sentences):\r\n \"Initialization\"\r\n self.samples = sentences\r\n\r\n def __len__(self):\r\n \"Denotes the total number of samples\"\r\n return len(self.samples)\r\n\r\n def __getitem__(self, index):\r\n \"Generates one sample of data\"\r\n # Select sample\r\n # Load data and get label\r\n samples = self.samples[index]\r\n\r\n return samples\r\n\r\ndef preprocess_function_train(examples):\r\n inputs = examples\r\n labels = [example+tokenizer.eos_token for example in examples ]\r\n inputs = tokenizer(inputs, max_length=30, padding=True, truncation=True)\r\n labels = tokenizer(labels, max_length=30, padding=True, truncation=True)\r\n model_inputs = inputs\r\n model_inputs[\"labels\"] = labels[\"input_ids\"]\r\n print(\"about to return\")\r\n return model_inputs\r\n\r\n\r\n##train[\"sentence\"] is dataframe column\r\ntrain_dataset = MyDataset(train['sentence'].values.tolist())\r\ntrain_dataset = train_dataset.map(\r\n preprocess_function,\r\n batched = True,\r\n batch_size=32\r\n )\r\n```\r\n\r\nStack trace of error:\r\n```\r\nTraceback (most recent call last):\r\n File \"dir\/train_generate.py\", line 362, in \r\n main()\r\n File \"dir\/train_generate.py\", line 245, in main\r\n train_dataset = train_dataset.map(\r\n File \"anaconda_dir\/anaconda3\/envs\/env1\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1244, in map\r\n return self._map_single(\r\n File \"anaconda_dir\/anaconda3\/envs\/env1\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 149, in wrapper\r\n unformatted_columns = set(self.column_names) - set(self._format_columns or [])\r\n File \"anaconda_dir\/anaconda3\/envs\/env1\/lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 526, in column_names\r\n return self._data.column_names\r\nAttributeError: 'MyDataset' object has no attribute '_data'\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2115\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2115","id":841283974,"node_id":"MDU6SXNzdWU4NDEyODM5NzQ=","number":2115,"title":"The datasets.map() implementation modifies the datatype of os.environ object","user":{"login":"leleamol","id":19983848,"node_id":"MDQ6VXNlcjE5OTgzODQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19983848?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/leleamol","html_url":"https:\/\/github.com\/leleamol","followers_url":"https:\/\/api.github.com\/users\/leleamol\/followers","following_url":"https:\/\/api.github.com\/users\/leleamol\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/leleamol\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/leleamol\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/leleamol\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/leleamol\/orgs","repos_url":"https:\/\/api.github.com\/users\/leleamol\/repos","events_url":"https:\/\/api.github.com\/users\/leleamol\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/leleamol\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-25T20:29:19Z","updated_at":"2021-03-26T15:13:52Z","closed_at":"2021-03-26T15:13:52Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"In our testing, we noticed that the datasets.map() implementation is modifying the datatype of python os.environ object from '_Environ' to 'dict'.\r\n\r\nThis causes following function calls to fail as follows:\r\n\r\n` \r\n x = os.environ.get(\"TEST_ENV_VARIABLE_AFTER_dataset_map\", default=None)\r\n TypeError: get() takes no keyword arguments\r\n`\r\nIt looks like the following line in datasets.map implementation introduced this functionality.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/0cb1ac06acb0df44a1cf4128d03a01865faa2504\/src\/datasets\/arrow_dataset.py#L1421\r\n\r\nHere is the test script to reproduce this error. 
\r\n\r\n\r\n```\r\nfrom datasets import load_dataset\r\nfrom transformers import AutoTokenizer\r\nimport os\r\n\r\n\r\ndef test_train():\r\n model_checkpoint = \"distilgpt2\"\r\n datasets = load_dataset('wikitext', 'wikitext-2-raw-v1')\r\n tokenizer = AutoTokenizer.from_pretrained(model_checkpoint, use_fast=True)\r\n tokenizer.pad_token = tokenizer.eos_token\r\n\r\n\r\n def tokenize_function(examples):\r\n y = tokenizer(examples['text'], truncation=True, max_length=64)\r\n return y\r\n\r\n x = os.environ.get(\"TEST_ENV_VARIABLE_BEFORE_dataset_map\", default=None)\r\n print(f\"Testing environment variable: TEST_ENV_VARIABLE_BEFORE_dataset_map {x}\")\r\n print(f\"Data type of os.environ before datasets.map = {os.environ.__class__.__name__}\")\r\n datasets.map(tokenize_function, batched=True, num_proc=2, remove_columns=[\"text\"])\r\n print(f\"Data type of os.environ after datasets.map = {os.environ.__class__.__name__}\")\r\n x = os.environ.get(\"TEST_ENV_VARIABLE_AFTER_dataset_map\", default=None)\r\n print(f\"Testing environment variable: TEST_ENV_VARIABLE_AFTER_dataset_map {x}\")\r\n\r\n\r\nif __name__ == \"__main__\":\r\n test_train()\r\n\r\n\r\n```\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2114\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114","id":841207878,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwOTc1MTA3","number":2114,"title":"Support for legal NLP datasets (EURLEX, ECtHR cases and EU-REG-IR)","user":{"login":"iliaschalkidis","id":1626984,"node_id":"MDQ6VXNlcjE2MjY5ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1626984?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliaschalkidis","html_url":"https:\/\/github.com\/iliaschalkidis","followers_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/followers","following_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/repos","events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-25T18:40:17Z","updated_at":"2021-03-31T10:38:50Z","closed_at":"2021-03-31T10:38:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2114","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2114.patch"},"body":"Add support for two legal NLP 
datasets:\r\n\r\n- EURLEX (https:\/\/www.aclweb.org\/anthology\/P19-1636\/)\r\n- ECtHR cases (https:\/\/arxiv.org\/abs\/2103.13084)\r\n- EU-REG-IR (https:\/\/arxiv.org\/abs\/2101.10726)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2113\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113","id":841191303,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwOTYxMDEz","number":2113,"title":"Implement Dataset as context manager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-25T18:18:30Z","updated_at":"2021-03-31T11:30:14Z","closed_at":"2021-03-31T08:30:11Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2113","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2113.patch"},"body":"When used as context manager, it would be safely deleted if some exception is raised.\r\n\r\nThis will avoid \r\n> During handling of the above exception, another exception occurred:","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2112\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112","id":841098008,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwODgyMjA0","number":2112,"title":"Support for legal NLP datasets (EURLEX and ECtHR 
cases)","user":{"login":"iliaschalkidis","id":1626984,"node_id":"MDQ6VXNlcjE2MjY5ODQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1626984?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/iliaschalkidis","html_url":"https:\/\/github.com\/iliaschalkidis","followers_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/followers","following_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/orgs","repos_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/repos","events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/iliaschalkidis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-25T16:24:17Z","updated_at":"2021-03-25T18:39:31Z","closed_at":"2021-03-25T18:34:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2112","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2112.patch"},"body":"Add support for two legal NLP datasets:\r\n- EURLEX (https:\/\/www.aclweb.org\/anthology\/P19-1636\/)\r\n- ECtHR cases (https:\/\/arxiv.org\/abs\/2103.13084)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2111\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111","id":841082087,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwODY4OTg5","number":2111,"title":"Compute WER metric 
iteratively","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-25T16:06:48Z","updated_at":"2021-04-06T07:20:43Z","closed_at":"2021-04-06T07:20:43Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2111","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2111.patch"},"body":"Compute WER metric iteratively to avoid MemoryError.\r\n\r\nFix #2078.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2110\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110","id":840794995,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwNjI1NDQ5","number":2110,"title":"Fix incorrect assertion in 
builder.py","user":{"login":"dreamgonfly","id":2340721,"node_id":"MDQ6VXNlcjIzNDA3MjE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2340721?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dreamgonfly","html_url":"https:\/\/github.com\/dreamgonfly","followers_url":"https:\/\/api.github.com\/users\/dreamgonfly\/followers","following_url":"https:\/\/api.github.com\/users\/dreamgonfly\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dreamgonfly\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dreamgonfly\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dreamgonfly\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dreamgonfly\/orgs","repos_url":"https:\/\/api.github.com\/users\/dreamgonfly\/repos","events_url":"https:\/\/api.github.com\/users\/dreamgonfly\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dreamgonfly\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-25T10:39:20Z","updated_at":"2021-04-12T13:33:03Z","closed_at":"2021-04-12T13:33:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2110","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2110.patch"},"body":"Fix incorrect num_examples comparison assertion in builder.py","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2109\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109","id":840746598,"node_id":"MDExOlB1bGxSZXF1ZXN0NjAwNTg1MzM5","number":2109,"title":"Add more issue templates and customize issue template 
chooser","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-25T09:41:53Z","updated_at":"2021-04-19T06:20:11Z","closed_at":"2021-04-19T06:20:11Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2109","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2109.patch"},"body":"When opening an issue, it is not evident for the users how to choose a blank issue template. There is a link at the bottom of all the other issue templates (`Don\u2019t see your issue here? Open a blank issue.`), but this is not very visible for users. 
This is the reason why many users finally chose the `add-dataset` template instead (this is more visible) for issues that indeed are not requesting the addition of a new dataset.\r\n\r\n~~With this PR, the default blank issue template would be as visible as the other templates (as the `add-dataset` template), thus making easier for the users to choose it.~~\r\n\r\nWith this PR:\r\n- more issue templates, besides `add-dataset`, are added: `bug-report` and `feature-request`\r\n- the issue template chooser is customized, so that it now includes a link to `Discussions` for questions","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2108\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2108","id":840181055,"node_id":"MDU6SXNzdWU4NDAxODEwNTU=","number":2108,"title":"Is there a way to use a GPU only when training an Index in the process of add_faisis_index?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892912,"node_id":"MDU6TGFiZWwxOTM1ODkyOTEy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/question","name":"question","color":"d876e3","default":true,"description":"Further information is requested"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-24T21:32:16Z","updated_at":"2021-03-25T06:31:43Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Motivation - Some FAISS indexes like IVF consist of the training step that clusters the dataset into a given number of indexes. 
It would be nice if we can use a GPU to do the training step and covert the index back to CPU as mention in [this faiss example](https:\/\/gist.github.com\/mdouze\/46d6bbbaabca0b9778fca37ed2bcccf6).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2107\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107","id":839495825,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk5NTAxODE5","number":2107,"title":"Metadata validation","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"assignees":[{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SB
randeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-03-24T08:52:41Z","updated_at":"2021-04-26T08:27:14Z","closed_at":"2021-04-26T08:27:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2107","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2107.patch"},"body":"- `pydantic` metadata schema with dedicated validators against our taxonomy\r\n- ci script to validate new changes against this schema and start a vertuous loop\r\n- soft validation on tasks ids since we expect the taxonomy to undergo some changes in the near future\r\n\r\nfor reference with the current validation we have ~365~ 378 datasets with invalid metadata! full error report [_here_.](https:\/\/gist.github.com\/theo-m\/61b3c0c47fc6121d08d3174bd4c2a26b)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2106\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2106","id":839084264,"node_id":"MDU6SXNzdWU4MzkwODQyNjQ=","number":2106,"title":"WMT19 Dataset for Kazakh-English is not formatted correctly","user":{"login":"trina731","id":22580542,"node_id":"MDQ6VXNlcjIyNTgwNTQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22580542?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/trina731","html_url":"https:\/\/github.com\/trina731","followers_url":"https:\/\/api.github.com\/users\/trina731\/followers","following_url":"https:\/\/api.github.com\/users\/trina731\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/trina731\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/trina731\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/trina731\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/trina731\/orgs","repos_url":"https:\/\/api.github.com\/users\/trina731\/repos","events_url":"https:\/\/api.github.com\/users\/trina731\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/trina731\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-23T20:14:47Z","updated_at":"2021-03-25T21:36:20Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"In addition to the bug of languages being switched from Issue @415, there are incorrect translations in 
the dataset because the English-Kazakh translations have a one off formatting error.\r\n\r\nThe News Commentary v14 parallel data set for kk-en from http:\/\/www.statmt.org\/wmt19\/translation-task.html has a bug here:\r\n\r\n> Line 94. The Swiss National Bank, for its part, has been battling with the deflationary effects of the franc\u2019s dramatic appreciation over the past few years.\t\u0428\u0432\u0435\u0439\u0446\u0430\u0440\u0438\u044f\u043d\u044b\u04a3 \u04b0\u043b\u0442\u0442\u044b\u049b \u0431\u0430\u043d\u043a\u0456 \u04e9\u0437 \u0442\u0430\u0440\u0430\u043f\u044b\u043d\u0430\u043d, \u0441\u043e\u04a3\u0493\u044b \u0431\u0456\u0440\u043d\u0435\u0448\u0435 \u0436\u044b\u043b \u0456\u0448\u0456\u043d\u0434\u0435 \u0444\u0440\u0430\u043d\u043a \u049b\u04b1\u043d\u044b\u043d\u044b\u04a3 \u049b\u0430\u0442\u0442\u044b \u04e9\u0441\u0443\u0456\u043d\u0456\u04a3 \u0434\u0435\u0444\u043b\u044f\u0446\u0438\u044f\u043b\u044b\u049b \u04d9\u0441\u0435\u0440\u0456\u043c\u0435\u043d \u043a\u04af\u0440\u0435\u0441\u0456\u043f \u043a\u0435\u043b\u0435\u0434\u0456.\r\n> \r\n> Line 95. \u0414\u0435\u0444\u043b\u044f\u0446\u0438\u044f\u043b\u044b\u049b \u043a\u04af\u0448\u0442\u0435\u0440 2008 \u0436\u044b\u043b\u044b \u0442\u0435\u0440\u0435\u04a3 \u0436\u04d9\u043d\u0435 \u04b1\u0437\u0430\u049b\u049b\u0430 \u0441\u043e\u0437\u044b\u043b\u0493\u0430\u043d \u0436\u0430\u04bb\u0430\u043d\u0434\u044b\u049b \u0434\u0430\u0493\u0434\u0430\u0440\u044b\u0441\u049b\u0430 \u0431\u0430\u0439\u043b\u0430\u043d\u044b\u0441\u0442\u044b \u043e\u0440\u044b\u043d \u0430\u043b\u0493\u0430\u043d \u0456\u0440\u0456 \u044d\u043a\u043e\u043d\u043e\u043c\u0438\u043a\u0430\u043b\u044b\u049b \u0436\u04d9\u043d\u0435 \u049b\u0430\u0440\u0436\u044b\u043b\u044b\u049b \u043e\u0440\u044b\u043d \u0430\u043b\u043c\u0430\u0441\u0443\u043b\u0430\u0440\u0434\u044b\u04a3 \u0430\u0440\u049b\u0430\u0441\u044b\u043d\u0434\u0430 \u0431\u043e\u0441\u0430\u0442\u044b\u043b\u0434\u044b. \u0416\u0435\u043a\u0435 \u049b\u0430\u0440\u044b\u0437 \u049b\u0430\u0440\u0430\u0436\u0430\u0442\u044b \u04af\u043b\u0435\u0441\u0456\u043d\u0456\u04a3 \u049b\u044b\u0441\u049b\u0430\u0440\u0443\u044b \u043e\u0440\u0442\u0430\u043b\u044b\u049b \u0431\u0430\u043d\u043a\u0442\u0456\u04a3 \u0440\u0435\u0444\u043b\u044f\u0446\u0438\u044f\u0493\u0430 \u0436\u04b1\u043c\u0441\u0430\u043b\u0493\u0430\u043d \u043a\u04af\u0448-\u0436\u0456\u0433\u0435\u0440\u0456\u043d\u0435 \u0442\u04b1\u0440\u0430\u049b\u0442\u044b \u0441\u043e\u049b\u049b\u0430\u043d \u049b\u0430\u0440\u0441\u044b \u0436\u0435\u043b\u0434\u0435\u0439 \u0431\u043e\u043b\u0434\u044b.\r\n> \r\n> Line 96. The deflationary forces were unleashed by the major economic and financial dislocations associated with the deep and protracted global crisis that erupted in 2008. 
Private deleveraging became a steady headwind to central bank efforts to reflate.\t2009 \u0436\u044b\u043b\u044b, \u0430\u043b\u0434\u044b\u04a3\u0493\u044b \u049b\u0430\u0442\u0430\u0440\u043b\u044b \u044d\u043a\u043e\u043d\u043e\u043c\u0438\u043a\u0430\u043b\u0430\u0440\u0434\u044b\u04a3 \u0448\u0430\u043c\u0430\u043c\u0435\u043d \u04af\u0448\u0442\u0435\u043d \u0431\u0456\u0440\u0456 \u0431\u0430\u0493\u0430\u043d\u044b\u04a3 \u0442\u04e9\u043c\u0435\u043d\u0434\u0435\u0443\u0456\u043d \u043a\u04e9\u0440\u0441\u0435\u0442\u0442\u0456, \u0431\u04b1\u043b \u0441\u043e\u0493\u044b\u0441\u0442\u0430\u043d \u043a\u0435\u0439\u0456\u043d\u0433\u0456 \u0436\u043e\u0493\u0430\u0440\u044b \u0434\u0435\u04a3\u0433\u0435\u0439 \u0431\u043e\u043b\u0434\u044b.\r\n\r\nAs you can see, line 95 has only the Kazakh translation which should be part of line 96. This causes all of the following English-Kazakh translation pairs to be one off rendering ALL of those translations incorrect. This issue was not fixed when the dataset was imported to Huggingface. By running this code \r\n\r\n```\r\nimport datasets\r\nfrom datasets import load_dataset\r\ndataset = load_dataset('wmt19', 'kk-en')\r\nfor key in dataset['train']['translation']:\r\n if 'The deflationary forces were unleashed by the major economic and financial dislocations associated with the deep and protracted global crisis that erupted in 2008.' in key['kk']:\r\n print(key['en'])\r\n print(key['kk'])\r\n break\r\n```\r\nwe get: \r\n> 2009 \u0436\u044b\u043b\u044b, \u0430\u043b\u0434\u044b\u04a3\u0493\u044b \u049b\u0430\u0442\u0430\u0440\u043b\u044b \u044d\u043a\u043e\u043d\u043e\u043c\u0438\u043a\u0430\u043b\u0430\u0440\u0434\u044b\u04a3 \u0448\u0430\u043c\u0430\u043c\u0435\u043d \u04af\u0448\u0442\u0435\u043d \u0431\u0456\u0440\u0456 \u0431\u0430\u0493\u0430\u043d\u044b\u04a3 \u0442\u04e9\u043c\u0435\u043d\u0434\u0435\u0443\u0456\u043d \u043a\u04e9\u0440\u0441\u0435\u0442\u0442\u0456, \u0431\u04b1\u043b \u0441\u043e\u0493\u044b\u0441\u0442\u0430\u043d \u043a\u0435\u0439\u0456\u043d\u0433\u0456 \u0436\u043e\u0493\u0430\u0440\u044b \u0434\u0435\u04a3\u0433\u0435\u0439 \u0431\u043e\u043b\u0434\u044b.\r\n> The deflationary forces were unleashed by the major economic and financial dislocations associated with the deep and protracted global crisis that erupted in 2008. Private deleveraging became a steady headwind to central bank efforts to reflate.\r\n\r\nwhich shows that the issue still persists in the Huggingface dataset. 
The Kazakh sentence matches up to the next English sentence in the dataset instead of the current one.\r\n\r\nPlease let me know if there's you have any ideas to fix this one-off error from the dataset or if this can be fixed by Huggingface.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2105\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2105","id":839059226,"node_id":"MDU6SXNzdWU4MzkwNTkyMjY=","number":2105,"title":"Request to remove S2ORC dataset","user":{"login":"kyleclo","id":13603748,"node_id":"MDQ6VXNlcjEzNjAzNzQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13603748?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/kyleclo","html_url":"https:\/\/github.com\/kyleclo","followers_url":"https:\/\/api.github.com\/users\/kyleclo\/followers","following_url":"https:\/\/api.github.com\/users\/kyleclo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/kyleclo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/kyleclo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/kyleclo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/kyleclo\/orgs","repos_url":"https:\/\/api.github.com\/users\/kyleclo\/repos","events_url":"https:\/\/api.github.com\/users\/kyleclo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/kyleclo\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-23T19:43:06Z","updated_at":"2021-08-04T19:18:02Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi! I was wondering if it's possible to remove [S2ORC](https:\/\/huggingface.co\/datasets\/s2orc) from hosting on Huggingface's platform? Unfortunately, there are some legal considerations about how we make this data available. Happy to add back to Huggingface's platform once we work out those hurdles! 
Thanks!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2104\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2104","id":839027834,"node_id":"MDU6SXNzdWU4MzkwMjc4MzQ=","number":2104,"title":"Trouble loading wiki_movies","user":{"login":"adityaarunsinghal","id":35391599,"node_id":"MDQ6VXNlcjM1MzkxNTk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35391599?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adityaarunsinghal","html_url":"https:\/\/github.com\/adityaarunsinghal","followers_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/followers","following_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/orgs","repos_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/repos","events_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adityaarunsinghal\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-23T18:59:54Z","updated_at":"2021-04-05T23:17:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello,\r\nI am trying to load_dataset(\"wiki_movies\") and it gives me this error - \r\n\r\n`FileNotFoundError: Couldn't find file locally at wiki_movies\/wiki_movies.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.2\/datasets\/wiki_movies\/wiki_movies.py or https:\/\/s3.amazonaws.com\/datasets.huggingface.co\/datasets\/datasets\/wiki_movies\/wiki_movies.py`\r\n\r\nTrying to do `python run_mlm.py \\\r\n --model_name_or_path roberta-base \\\r\n --dataset_name wiki_movies \\` also gives the same error. \r\n\r\nIs this something on my end? From what I can tell, this dataset was re-added by @lhoestq a few months ago. 
\r\nThank you!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2103\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2103","id":838946916,"node_id":"MDU6SXNzdWU4Mzg5NDY5MTY=","number":2103,"title":"citation, homepage, and license fields of `dataset_info.json` are duplicated many times","user":{"login":"samsontmr","id":15007950,"node_id":"MDQ6VXNlcjE1MDA3OTUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15007950?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/samsontmr","html_url":"https:\/\/github.com\/samsontmr","followers_url":"https:\/\/api.github.com\/users\/samsontmr\/followers","following_url":"https:\/\/api.github.com\/users\/samsontmr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/samsontmr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/samsontmr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/samsontmr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/samsontmr\/orgs","repos_url":"https:\/\/api.github.com\/users\/samsontmr\/repos","events_url":"https:\/\/api.github.com\/users\/samsontmr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/samsontmr\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-23T17:18:09Z","updated_at":"2021-04-06T14:39:59Z","closed_at":"2021-04-06T14:39:59Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"This happens after a `map` operation when `num_proc` is set to `>1`. 
I tested this by cleaning up the json before running the `map` op on the dataset so it's unlikely it's coming from an earlier concatenation.\r\n\r\nExample result:\r\n```\r\n\"citation\": \"@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n url = {https:\/\/dumps.wikimedia.org}\\n}\\n\\n@ONLINE {wikidump,\\n author = {Wikimedia Foundation},\\n title = {Wikimedia Downloads},\\n\r\n```\r\n\r\n@lhoestq and I believe this is happening due to the fields being concatenated `num_proc` times.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2102\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102","id":838794090,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4OTEyNzUw","number":2102,"title":"Move Dataset.to_csv to csv 
module","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":2851292821,"node_id":"MDU6TGFiZWwyODUxMjkyODIx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/refactoring","name":"refactoring","color":"B67A40","default":false,"description":"Restructuring existing code without changing its external behavior"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-23T14:35:46Z","updated_at":"2021-03-24T14:07:35Z","closed_at":"2021-03-24T14:07:34Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2102","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2102.patch"},"body":"Move the implementation of `Dataset.to_csv` to module `datasets.io.csv`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2101\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101","id":838586184,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4NzQzMDM4","number":2101,"title":"MIAM dataset - new citation 
details","user":{"login":"eusip","id":1551356,"node_id":"MDQ6VXNlcjE1NTEzNTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1551356?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eusip","html_url":"https:\/\/github.com\/eusip","followers_url":"https:\/\/api.github.com\/users\/eusip\/followers","following_url":"https:\/\/api.github.com\/users\/eusip\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eusip\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eusip\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eusip\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eusip\/orgs","repos_url":"https:\/\/api.github.com\/users\/eusip\/repos","events_url":"https:\/\/api.github.com\/users\/eusip\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eusip\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-23T10:41:23Z","updated_at":"2021-03-23T18:08:10Z","closed_at":"2021-03-23T18:08:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2101","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2101.patch"},"body":"Hi @lhoestq, I have updated the citations to reference an OpenReview preprint.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2100\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100","id":838574631,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4NzMzOTM0","number":2100,"title":"Fix deprecated warning message and 
docstring","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-23T10:27:52Z","updated_at":"2021-03-24T08:19:41Z","closed_at":"2021-03-23T18:03:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2100","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2100.patch"},"body":"Fix deprecated warnings:\r\n- Use deprecated Sphinx directive in docstring\r\n- Fix format of deprecated message\r\n- Raise FutureWarning","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2099\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2099","id":838523819,"node_id":"MDU6SXNzdWU4Mzg1MjM4MTk=","number":2099,"title":"load_from_disk takes a long time to load local 
dataset","user":{"login":"samsontmr","id":15007950,"node_id":"MDQ6VXNlcjE1MDA3OTUw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15007950?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/samsontmr","html_url":"https:\/\/github.com\/samsontmr","followers_url":"https:\/\/api.github.com\/users\/samsontmr\/followers","following_url":"https:\/\/api.github.com\/users\/samsontmr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/samsontmr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/samsontmr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/samsontmr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/samsontmr\/orgs","repos_url":"https:\/\/api.github.com\/users\/samsontmr\/repos","events_url":"https:\/\/api.github.com\/users\/samsontmr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/samsontmr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-03-23T09:28:37Z","updated_at":"2021-03-23T17:12:16Z","closed_at":"2021-03-23T17:12:16Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I have an extremely large tokenized dataset (24M examples) that loads in a few minutes. However, after adding a column similar to `input_ids` (basically a list of integers) and saving the dataset to disk, the load time goes to >1 hour. I've even tried using `np.uint8` after seeing #1985 but it doesn't seem to be helping (the total size seems to be smaller though).\r\n\r\nDoes anyone know what could be the issue? Or does the casting of that column to `int8` need to happen in the function that writes the arrow table instead of in the `map` where I create the list of integers?\r\n\r\nTagging @lhoestq since you seem to be working on these issues and PRs :)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2098\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2098","id":838447959,"node_id":"MDU6SXNzdWU4Mzg0NDc5NTk=","number":2098,"title":"SQuAD version 
","user":{"login":"h-peng17","id":39556019,"node_id":"MDQ6VXNlcjM5NTU2MDE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39556019?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/h-peng17","html_url":"https:\/\/github.com\/h-peng17","followers_url":"https:\/\/api.github.com\/users\/h-peng17\/followers","following_url":"https:\/\/api.github.com\/users\/h-peng17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/h-peng17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/h-peng17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/h-peng17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/h-peng17\/orgs","repos_url":"https:\/\/api.github.com\/users\/h-peng17\/repos","events_url":"https:\/\/api.github.com\/users\/h-peng17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/h-peng17\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-23T07:47:54Z","updated_at":"2021-03-26T09:48:54Z","closed_at":"2021-03-26T09:48:54Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi~ \r\nI want train on squad dataset. What's the version of the squad? Is it 1.1 or 1.0? I'm new in QA, I don't find some descriptions about it. ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2097\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097","id":838105289,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk4MzM4MTA3","number":2097,"title":"fixes issue #1110 by descending further if `obj[\"_type\"]` is a 
dict","user":{"login":"dcfidalgo","id":15979778,"node_id":"MDQ6VXNlcjE1OTc5Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15979778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dcfidalgo","html_url":"https:\/\/github.com\/dcfidalgo","followers_url":"https:\/\/api.github.com\/users\/dcfidalgo\/followers","following_url":"https:\/\/api.github.com\/users\/dcfidalgo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dcfidalgo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dcfidalgo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dcfidalgo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dcfidalgo\/orgs","repos_url":"https:\/\/api.github.com\/users\/dcfidalgo\/repos","events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-22T21:00:55Z","updated_at":"2021-03-22T21:01:11Z","closed_at":"2021-03-22T21:01:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2097","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2097.patch"},"body":"Check metrics","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2096\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2096","id":838038379,"node_id":"MDU6SXNzdWU4MzgwMzgzNzk=","number":2096,"title":"CoNLL 2003 dataset not including German","user":{"login":"rxian","id":8406802,"node_id":"MDQ6VXNlcjg0MDY4MDI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8406802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/rxian","html_url":"https:\/\/github.com\/rxian","followers_url":"https:\/\/api.github.com\/users\/rxian\/followers","following_url":"https:\/\/api.github.com\/users\/rxian\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/rxian\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/rxian\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/rxian\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/rxian\/orgs","repos_url":"https:\/\/api.github.com\/users\/rxian\/repos","events_url":"https:\/\/api.github.com\/users\/rxian\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/rxian\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-22T19:23:56Z","updated_at":"2021-03-30T09:45:35Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello, thanks for all the work on developing and maintaining this amazing platform, which I am enjoying working with!\r\n\r\nI was wondering if there is a reason why the German CoNLL 2003 dataset is not included in the [repository](https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/conll2003), since a copy of it could be found in some places on the internet such as GitHub? I could help adding the German data to the hub, unless there are some copyright issues that I am unaware of...\r\n\r\nThis is considering that many work use the union of CoNLL 2002 and 2003 datasets for comparing cross-lingual NER transfer performance in `en`, `de`, `es`, and `nl`. E.g., [XLM-R](https:\/\/www.aclweb.org\/anthology\/2020.acl-main.747.pdf).\r\n\r\n## Adding a Dataset\r\n- **Name:** CoNLL 2003 German\r\n- **Paper:** https:\/\/www.aclweb.org\/anthology\/W03-0419\/\r\n- **Data:** https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/conll2003\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2093\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093","id":837209211,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3NTgyNjUx","number":2093,"title":"Fix: Allows a feature to be named \"_type\"","user":{"login":"dcfidalgo","id":15979778,"node_id":"MDQ6VXNlcjE1OTc5Nzc4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15979778?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dcfidalgo","html_url":"https:\/\/github.com\/dcfidalgo","followers_url":"https:\/\/api.github.com\/users\/dcfidalgo\/followers","following_url":"https:\/\/api.github.com\/users\/dcfidalgo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dcfidalgo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dcfidalgo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dcfidalgo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dcfidalgo\/orgs","repos_url":"https:\/\/api.github.com\/users\/dcfidalgo\/repos","events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dcfidalgo\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-21T23:21:57Z","updated_at":"2021-03-25T14:35:54Z","closed_at":"2021-03-25T14:35:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2093","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2093.patch"},"body":"This PR tries to fix issue #1110. 
Sorry for taking so long to come back to this.\r\n\r\nIt's a simple fix, but i am not sure if it works for all possible types of `obj`. Let me know what you think @lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2092\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2092","id":836984043,"node_id":"MDU6SXNzdWU4MzY5ODQwNDM=","number":2092,"title":"How to disable making arrow tables in load_dataset ?","user":{"login":"Jeevesh8","id":48825663,"node_id":"MDQ6VXNlcjQ4ODI1NjYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/48825663?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Jeevesh8","html_url":"https:\/\/github.com\/Jeevesh8","followers_url":"https:\/\/api.github.com\/users\/Jeevesh8\/followers","following_url":"https:\/\/api.github.com\/users\/Jeevesh8\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Jeevesh8\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Jeevesh8\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Jeevesh8\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Jeevesh8\/orgs","repos_url":"https:\/\/api.github.com\/users\/Jeevesh8\/repos","events_url":"https:\/\/api.github.com\/users\/Jeevesh8\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Jeevesh8\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-21T04:50:07Z","updated_at":"2021-03-26T18:37:40Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Is there a way to disable the construction of arrow tables, or to make them on the fly as the dataset is being used ?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2091\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091","id":836831403,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3Mjk4ODI3","number":2091,"title":"Fix copy snippet in 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-20T15:08:22Z","updated_at":"2021-03-24T08:20:50Z","closed_at":"2021-03-23T17:18:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2091","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2091.patch"},"body":"With this change the lines starting with `...` in the code blocks can be properly copied to clipboard.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2090\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090","id":836807498,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3MjgwNTEy","number":2090,"title":"Add machine translated multilingual STS benchmark 
dataset","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-20T13:28:07Z","updated_at":"2021-03-29T13:24:42Z","closed_at":"2021-03-29T13:00:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2090","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2090.patch"},"body":"also see here https:\/\/github.com\/PhilipMay\/stsb-multi-mt","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2089\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2089","id":836788019,"node_id":"MDU6SXNzdWU4MzY3ODgwMTk=","number":2089,"title":"Add documentaton for dataset README.md 
files","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-20T11:44:38Z","updated_at":"2021-07-12T17:41:40Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\nthe dataset README files have special headers.\r\nSomehow a documenation of the allowed values and tags is missing.\r\nCould you add that?\r\n\r\nJust to give some concrete questions that should be answered imo:\r\n- which values can be passted to multilinguality?\r\n- what should be passed to language_creators?\r\n- which values should licenses have? What do I say when it is a custom license? Should I add a link?\r\n- how should I choose size_categories ? What are valid ranges?\r\n- what are valid task_categories?\r\n\r\nThanks\r\nPhilip","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2088\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088","id":836763733,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3MjQ4Mzk1","number":2088,"title":"change bibtex template to author instead of 
authors","user":{"login":"PhilipMay","id":229382,"node_id":"MDQ6VXNlcjIyOTM4Mg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/229382?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PhilipMay","html_url":"https:\/\/github.com\/PhilipMay","followers_url":"https:\/\/api.github.com\/users\/PhilipMay\/followers","following_url":"https:\/\/api.github.com\/users\/PhilipMay\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PhilipMay\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PhilipMay\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PhilipMay\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PhilipMay\/orgs","repos_url":"https:\/\/api.github.com\/users\/PhilipMay\/repos","events_url":"https:\/\/api.github.com\/users\/PhilipMay\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PhilipMay\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-20T09:23:44Z","updated_at":"2021-03-23T15:40:12Z","closed_at":"2021-03-23T15:40:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2088","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2088.patch"},"body":"Hi,\r\nIMO when using BibTex Author should be used instead of Authors.\r\nSee here: http:\/\/www.bibtex.org\/Using\/de\/\r\n\r\nThanks\r\nPhilip","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2087\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087","id":836587392,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk3MDg4NTk2","number":2087,"title":"Update metadata if dataset features are 
modified","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-20T02:05:23Z","updated_at":"2021-04-09T09:25:33Z","closed_at":"2021-04-09T09:25:33Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2087","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2087.patch"},"body":"This PR adds a decorator that updates the dataset metadata if a previously executed transform modifies its features. \r\nFixes #2083 \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2086\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086","id":836249587,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk2Nzg0Mjcz","number":2086,"title":"change user permissions to 
-rw-r--r--","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-19T18:14:56Z","updated_at":"2021-03-24T13:59:04Z","closed_at":"2021-03-24T13:59:04Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2086","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2086.patch"},"body":"Fix for #2065 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2085\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085","id":835870994,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk2NDYyOTc2","number":2085,"title":"Fix max_wait_time in 
requests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-19T11:22:26Z","updated_at":"2021-03-23T15:36:38Z","closed_at":"2021-03-23T15:36:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2085","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2085.patch"},"body":"it was handled as a min time, not max cc @SBrandeis ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2084\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2084","id":835750671,"node_id":"MDU6SXNzdWU4MzU3NTA2NzE=","number":2084,"title":"CUAD - Contract Understanding Atticus Dataset","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-19T09:27:43Z","updated_at":"2021-04-16T08:50:44Z","closed_at":"2021-04-16T08:50:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"## Adding a Dataset\r\n- **Name:** CUAD - Contract Understanding Atticus Dataset\r\n- **Description:** As one of the only large, specialized NLP benchmarks annotated by experts, CUAD can serve as a challenging research benchmark for the broader NLP community.\r\n- **Paper:** https:\/\/arxiv.org\/abs\/2103.06268\r\n- **Data:** https:\/\/github.com\/TheAtticusProject\/cuad\/\r\n- **Motivation:** good domain specific datasets are valuable\r\n\r\nInstructions to add a new dataset can be found [here](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/ADD_NEW_DATASET.md).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2083\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2083","id":835695425,"node_id":"MDU6SXNzdWU4MzU2OTU0MjU=","number":2083,"title":"`concatenate_datasets` throws error when changing the order of datasets to concatenate","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-19T08:29:48Z","updated_at":"2021-04-09T09:25:33Z","closed_at":"2021-04-09T09:25:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Hey, \r\n\r\nI played around with the `concatenate_datasets(...)` function: https:\/\/huggingface.co\/docs\/datasets\/package_reference\/main_classes.html?highlight=concatenate_datasets#datasets.concatenate_datasets\r\n\r\nand noticed that when the order in which the datasets are concatenated changes an error is thrown where it should not IMO.\r\n\r\nHere is a google colab to reproduce the error: https:\/\/colab.research.google.com\/drive\/17VTFU4KQ735-waWZJjeOHS6yDTfV5ekK?usp=sharing","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2082\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082","id":835401555,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk2MDY1NTM0","number":2082,"title":"Updated card using information from data statement and datasheet","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-19T00:39:38Z","updated_at":"2021-03-19T14:29:09Z","closed_at":"2021-03-19T14:29:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2082","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2082.patch"},"body":"I updated and clarified the REFreSD [data card](https:\/\/github.com\/mcmillanmajora\/datasets\/blob\/refresd_card\/datasets\/refresd\/README.md) with information from the Eleftheria's [website](https:\/\/elbria.github.io\/post\/refresd\/). I added brief descriptions where the initial card referred to the paper, and I also recreated some of the tables in the paper to show relevant dataset statistics.\r\n\r\nI'll email Eleftheria to see if she has any comments on the card. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2081\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081","id":835112968,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1ODE3OTM4","number":2081,"title":"Fix docstrings issues","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-18T18:11:01Z","updated_at":"2021-04-07T14:37:43Z","closed_at":"2021-04-07T14:37:43Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2081","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2081.patch"},"body":"Fix docstring issues.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2080\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2080","id":835023000,"node_id":"MDU6SXNzdWU4MzUwMjMwMDA=","number":2080,"title":"Multidimensional arrays in a 
Dataset","user":{"login":"vermouthmjl","id":3142085,"node_id":"MDQ6VXNlcjMxNDIwODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3142085?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vermouthmjl","html_url":"https:\/\/github.com\/vermouthmjl","followers_url":"https:\/\/api.github.com\/users\/vermouthmjl\/followers","following_url":"https:\/\/api.github.com\/users\/vermouthmjl\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vermouthmjl\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vermouthmjl\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vermouthmjl\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vermouthmjl\/orgs","repos_url":"https:\/\/api.github.com\/users\/vermouthmjl\/repos","events_url":"https:\/\/api.github.com\/users\/vermouthmjl\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vermouthmjl\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-18T16:29:14Z","updated_at":"2021-03-25T12:46:53Z","closed_at":"2021-03-25T12:46:53Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm trying to put together a `datasets.Dataset` to be used with LayoutLM which is available in `transformers`. This model requires as input the bounding boxes of each of the token of a sequence. This is when I realized that `Dataset` does not support multi-dimensional arrays as a value for a column in a row.\r\n\r\nThe following code results in conversion error in pyarrow (`pyarrow.lib.ArrowInvalid: ('Can only convert 1-dimensional array values', 'Conversion failed for column bbox with type object')`)\r\n\r\n```\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\ndataset = pd.DataFrame({\r\n 'bbox': [\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]]),\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]]),\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]]),\r\n np.array([[1,2,3,4],[1,2,3,4],[1,2,3,4]])\r\n ],\r\n 'input_ids': [1, 2, 3, 4]\r\n})\r\ndataset = Dataset.from_pandas(dataset)\r\n```\r\n\r\nSince I wanted to use pytorch for the downstream training task, I also tried a few ways to directly put in a column of 2-D pytorch tensor in a formatted dataset, but I can only have a list of 1-D tensors, or a list of arrays, or a list of lists.\r\n\r\n```\r\nimport torch\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\n\r\ndataset = pd.DataFrame({\r\n 'bbox': [\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]],\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]],\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]],\r\n [[1,2,3,4],[1,2,3,4],[1,2,3,4]]\r\n ],\r\n 'input_ids': [1, 2, 3, 4]\r\n})\r\ndataset = Dataset.from_pandas(dataset)\r\n\r\ndef test(examples):\r\n return {'bbbox': torch.Tensor(examples['bbox'])}\r\ndataset = dataset.map(test)\r\nprint(dataset[0]['bbox'])\r\nprint(dataset[0]['bbbox'])\r\n\r\ndataset.set_format(type='torch', columns=['input_ids', 'bbox'], output_all_columns=True)\r\nprint(dataset[0]['bbox'])\r\nprint(dataset[0]['bbbox'])\r\n\r\ndef test2(examples):\r\n return {'bbbox': torch.stack(examples['bbox'])}\r\ndataset = dataset.map(test2)\r\n\r\nprint(dataset[0]['bbox'])\r\nprint(dataset[0]['bbbox'])\r\n```\r\n\r\nIs is possible to support n-D arrays\/tensors in datasets? 
\r\nIt seems that it can also be useful for this [feature request](https:\/\/github.com\/huggingface\/datasets\/issues\/263).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2079\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079","id":834920493,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1NjU2MDQ5","number":2079,"title":"Refactorize Metric.compute signature to force keyword arguments only","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-18T15:05:50Z","updated_at":"2021-03-23T15:31:44Z","closed_at":"2021-03-23T15:31:44Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2079","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2079.patch"},"body":"Minor refactoring of Metric.compute signature to force the use of keyword arguments, by using the single star syntax.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2078\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2078","id":834694819,"node_id":"MDU6SXNzdWU4MzQ2OTQ4MTk=","number":2078,"title":"MemoryError when computing WER 
metric","user":{"login":"diego-fustes","id":5707233,"node_id":"MDQ6VXNlcjU3MDcyMzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5707233?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/diego-fustes","html_url":"https:\/\/github.com\/diego-fustes","followers_url":"https:\/\/api.github.com\/users\/diego-fustes\/followers","following_url":"https:\/\/api.github.com\/users\/diego-fustes\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/diego-fustes\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/diego-fustes\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/diego-fustes\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/diego-fustes\/orgs","repos_url":"https:\/\/api.github.com\/users\/diego-fustes\/repos","events_url":"https:\/\/api.github.com\/users\/diego-fustes\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/diego-fustes\/received_events","type":"User","site_admin":false},"labels":[{"id":2067393914,"node_id":"MDU6TGFiZWwyMDY3MzkzOTE0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/metric%20bug","name":"metric bug","color":"25b21e","default":false,"description":"A bug in a metric script"}],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":11,"created_at":"2021-03-18T11:30:05Z","updated_at":"2021-05-01T08:31:49Z","closed_at":"2021-
04-06T07:20:43Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, I'm trying to follow the ASR example to try Wav2Vec. This is the code that I use for WER calculation:\r\n\r\n```\r\nwer = load_metric(\"wer\")\r\nprint(wer.compute(predictions=result[\"predicted\"], references=result[\"target\"]))\r\n```\r\n\r\nHowever, I receive the following exception:\r\n\r\n`Traceback (most recent call last):\r\n File \"\/home\/diego\/IpGlobal\/wav2vec\/test_wav2vec.py\", line 51, in \r\n print(wer.compute(predictions=result[\"predicted\"], references=result[\"target\"]))\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/datasets\/metric.py\", line 403, in compute\r\n output = self._compute(predictions=predictions, references=references, **kwargs)\r\n File \"\/home\/diego\/.cache\/huggingface\/modules\/datasets_modules\/metrics\/wer\/73b2d32b723b7fb8f204d785c00980ae4d937f12a65466f8fdf78706e2951281\/wer.py\", line 94, in _compute\r\n return wer(references, predictions)\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/jiwer\/measures.py\", line 81, in wer\r\n truth, hypothesis, truth_transform, hypothesis_transform, **kwargs\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/jiwer\/measures.py\", line 192, in compute_measures\r\n H, S, D, I = _get_operation_counts(truth, hypothesis)\r\n File \"\/home\/diego\/miniconda3\/envs\/wav2vec3.6\/lib\/python3.6\/site-packages\/jiwer\/measures.py\", line 273, in _get_operation_counts\r\n editops = Levenshtein.editops(source_string, destination_string)\r\nMemoryError`\r\n\r\nMy system has more than 10GB of available RAM. Looking at the code, I think that it could be related to the way jiwer does the calculation, as it is pasting all the sentences in a single string before calling Levenshtein editops function.\r\n\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2077\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077","id":834649536,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1NDI0MTYw","number":2077,"title":"Bump huggingface_hub 
version","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-18T10:54:34Z","updated_at":"2021-03-18T11:33:26Z","closed_at":"2021-03-18T11:33:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2077","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2077.patch"},"body":"`0.0.2 => 0.0.6`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2076\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2076","id":834445296,"node_id":"MDU6SXNzdWU4MzQ0NDUyOTY=","number":2076,"title":"Issue: Dataset download error","user":{"login":"XuhuiZhou","id":20436061,"node_id":"MDQ6VXNlcjIwNDM2MDYx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20436061?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/XuhuiZhou","html_url":"https:\/\/github.com\/XuhuiZhou","followers_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/followers","following_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/orgs","repos_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/repos","events_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/XuhuiZhou\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-18T06:36:06Z","updated_at":"2021-03-22T11:52:31Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"The download link in `iwslt2017.py` file does not seem to work anymore.\r\n\r\nFor example, `FileNotFoundError: Couldn't find file at https:\/\/wit3.fbk.eu\/archive\/2017-01-trnted\/texts\/zh\/en\/zh-en.tgz`\r\n\r\nWould be nice if we could modify it script and use the new downloadable link?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2075\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2075","id":834301246,"node_id":"MDU6SXNzdWU4MzQzMDEyNDY=","number":2075,"title":"ConnectionError: Couldn't reach common_voice.py","user":{"login":"LifaSun","id":6188893,"node_id":"MDQ6VXNlcjYxODg4OTM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6188893?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LifaSun","html_url":"https:\/\/github.com\/LifaSun","followers_url":"https:\/\/api.github.com\/users\/LifaSun\/followers","following_url":"https:\/\/api.github.com\/users\/LifaSun\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LifaSun\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LifaSun\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LifaSun\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LifaSun\/orgs","repos_url":"https:\/\/api.github.com\/users\/LifaSun\/repos","events_url":"https:\/\/api.github.com\/users\/LifaSun\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LifaSun\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-18T01:19:06Z","updated_at":"2021-03-20T10:29:41Z","closed_at":"2021-03-20T10:29:41Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"When I run: \r\nfrom datasets import load_dataset, load_metric\r\n\r\ncommon_voice_train = load_dataset(\"common_voice\", \"zh-CN\", split=\"train+validation\")\r\ncommon_voice_test = load_dataset(\"common_voice\", \"zh-CN\", split=\"test\")\r\n\r\nGot:\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/common_voice\/common_voice.py\r\n\r\nVersion:\r\n1.4.1\r\n\r\nThanks! 
@lhoestq @LysandreJik @thomwolf ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2074\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074","id":834268463,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1MTIzMjYw","number":2074,"title":"Fix size categories in YAML Tags","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-03-18T00:02:36Z","updated_at":"2021-03-23T17:11:10Z","closed_at":"2021-03-23T17:11:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2074","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2074.patch"},"body":"This PR fixes several `size_categories` in YAML tags and makes them consistent. Additionally, I have added a few more categories after `1M`, up to `1T`. I would like to add that to the streamlit app also.\r\n\r\nThis PR also adds a couple of infos that I found missing.\r\n\r\nThe code for generating this:\r\n```python\r\nfor dataset in sorted(os.listdir('.\/datasets\/')):\r\n if '.' 
not in dataset and dataset not in ['c4', 'csv', 'downloads', 'cc100', 'ccaligned_multilingual', 'celeb_a', 'chr_en', 'emea', 'glue']:\r\n infos = {}\r\n stats = {}\r\n st = ''\r\n with open(f'datasets\/{dataset}\/README.md') as f:\r\n d = f.read()\r\n start_dash = d.find('---') + 3\r\n end_dash = d[start_dash:].find('---') + 3\r\n rest_text = d[end_dash + 3:]\r\n try:\r\n full_yaml = OmegaConf.create(d[start_dash:end_dash])\r\n readme = OmegaConf.to_container(full_yaml['size_categories'], resolve=True)\r\n except Exception as e:\r\n print(e)\r\n continue \r\n try:\r\n with open(f'datasets\/{dataset}\/dataset_infos.json') as f:\r\n data = json.load(f)\r\n except Exception as e:\r\n print(e)\r\n continue # Skip those without infos.\r\n done_set = set([])\r\n num_keys = len(data.keys())\r\n for keys in data:\r\n # dataset = load_dataset('opus100', f'{dirs}')\r\n total = 0\r\n for split in data[keys]['splits']:\r\n total = total + data[keys]['splits'][split]['num_examples']\r\n if total < 1000:\r\n st += \"- n<1K\" + '\\n'\r\n infos[keys] = [\"n<1K\"]\r\n elif total >= 1000 and total < 10000:\r\n infos[keys] = [\"1K= 10000 and total < 100000:\r\n infos[keys] = [\"10K= 100000 and total < 1000000:\r\n infos[keys] = [\"100K= 1000000 and total < 10000000:\r\n infos[keys] = [\"1M= 10000000 and total < 100000000:\r\n infos[keys] = [\"10M= 100000000 and total < 1000000000:\r\n infos[keys] = [\"100M= 1000000000 and total < 10000000000:\r\n infos[keys] = [\"1B= 10000000000 and total < 100000000000:\r\n infos[keys] = [\"10B= 100000000000 and total < 1000000000000:\r\n infos[keys] = [\"100B1T\"]\r\n done_set = done_set.union(infos[keys])\r\n if (isinstance(readme, list) and list(infos.values())[0] != readme) or (isinstance(readme, dict) and readme != infos):\r\n\r\n print('-' * 30)\r\n print(done_set)\r\n print(f\"Changing Full YAML for {dataset}\")\r\n print(OmegaConf.to_yaml(full_yaml))\r\n\r\n if len(done_set) == 1:\r\n full_yaml['size_categories'] = list(done_set)\r\n else:\r\n full_yaml['size_categories'] = dict([(k, v) for k, v in sorted(infos.items(), key=lambda x: x[0])])\r\n\r\n full_yaml_string = OmegaConf.to_yaml(full_yaml)\r\n print('-' * 30)\r\n print(full_yaml_string)\r\n inp = input('Do you wish to continue?(Y\/N)')\r\n if inp == 'Y':\r\n with open(f'.\/datasets\/{dataset}\/README.md', 'w') as f:\r\n f.write('---\\n')\r\n f.write(full_yaml_string)\r\n f.write('---')\r\n f.write(rest_text)\r\n else:\r\n break\r\n```\r\n\r\nNote that the lower-bound is inclusive. I'm unsure if this is how it is done in the tagging app.\r\n\r\nEDIT:\r\nIt would be great if there was a way to make the task categories consistent too. For this, the streamlit app can look into all the datasets and check for existing categories and show them in the list. This may add some consistency.\r\n\r\nEDIT:\r\nI understand this will not work for cases where only the infos for some of the configs are present, for example: `ccaligned_multingual` has only 5 out of several configs present, and infos has only information about them. 
Hence, I have skipped a few datasets in the code, if there are more such datasets, then I'll ignore them too.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2073\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073","id":834192501,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk1MDYyMzQ2","number":2073,"title":"Fixes check of TF_AVAILABLE and TORCH_AVAILABLE","user":{"login":"philschmid","id":32632186,"node_id":"MDQ6VXNlcjMyNjMyMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32632186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/philschmid","html_url":"https:\/\/github.com\/philschmid","followers_url":"https:\/\/api.github.com\/users\/philschmid\/followers","following_url":"https:\/\/api.github.com\/users\/philschmid\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/philschmid\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/philschmid\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/philschmid\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/philschmid\/orgs","repos_url":"https:\/\/api.github.com\/users\/philschmid\/repos","events_url":"https:\/\/api.github.com\/users\/philschmid\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/philschmid\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-17T21:28:53Z","updated_at":"2021-03-18T09:09:25Z","closed_at":"2021-03-18T09:09:24Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2073","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2073.patch"},"body":"# What is this PR doing\r\n\r\nThis PR implements the checks if `Tensorflow` and `Pytorch` are available the same way as `transformers` does it. I added the additional checks for the different `Tensorflow` and `torch` versions. 
#2068 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2072\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072","id":834054837,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0OTQ5NjA4","number":2072,"title":"Fix docstring issues","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-17T18:13:44Z","updated_at":"2021-03-24T08:20:57Z","closed_at":"2021-03-18T12:41:21Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2072","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2072.patch"},"body":"Fix docstring issues.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2071\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2071","id":833950824,"node_id":"MDU6SXNzdWU4MzM5NTA4MjQ=","number":2071,"title":"Multiprocessing is slower than single 
process","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-17T16:08:58Z","updated_at":"2021-03-18T09:10:23Z","closed_at":"2021-03-18T09:10:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"```python\r\n# benchmark_filter.py\r\nimport logging\r\nimport sys\r\nimport time\r\n\r\nfrom datasets import load_dataset, set_caching_enabled\r\n\r\n\r\nif __name__ == \"__main__\":\r\n set_caching_enabled(False)\r\n logging.basicConfig(level=logging.DEBUG)\r\n\r\n bc = load_dataset(\"bookcorpus\")\r\n\r\n now = time.time()\r\n try:\r\n bc[\"train\"].filter(lambda x: len(x[\"text\"]) < 64, num_proc=int(sys.argv[1]))\r\n except Exception as e:\r\n print(f\"cancelled: {e}\")\r\n elapsed = time.time() - now\r\n\r\n print(elapsed)\r\n```\r\n\r\nRunning `python benchmark_filter.py 1` (20min+) is faster than `python benchmark_filter.py 2` (2hrs+)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2070\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2070","id":833799035,"node_id":"MDU6SXNzdWU4MzM3OTkwMzU=","number":2070,"title":"ArrowInvalid issue for squad v2 
dataset","user":{"login":"MichaelYxWang","id":29818977,"node_id":"MDQ6VXNlcjI5ODE4OTc3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29818977?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MichaelYxWang","html_url":"https:\/\/github.com\/MichaelYxWang","followers_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/followers","following_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/orgs","repos_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/repos","events_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MichaelYxWang\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-17T13:51:49Z","updated_at":"2021-08-04T17:57:16Z","closed_at":"2021-08-04T17:57:16Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello, I am using the huggingface official question answering example notebook (https:\/\/colab.research.google.com\/github\/huggingface\/notebooks\/blob\/master\/examples\/question_answering.ipynb). \r\n\r\nIn the prepare_validation_features function, I made some modifications to tokenize a new set of quesions with the original contexts and save them in three different list called candidate_input_dis, candidate_attetion_mask and candidate_token_type_ids. 
When I try to run the next cell for dataset.map, I got the following error:\r\n\r\n`ArrowInvalid: Column 1 named candidate_attention_mask expected length 1180 but got length 1178`\r\n\r\nMy code is as follows:\r\n\r\n```\r\ndef generate_candidate_questions(examples):\r\n val_questions = examples[\"question\"]\r\n candididate_questions = random.sample(datasets[\"train\"][\"question\"], len(val_questions))\r\n candididate_questions = [x[:max_length] for x in candididate_questions]\r\n return candididate_questions\r\n\r\ndef prepare_validation_features(examples, use_mixing=False):\r\n pad_on_right = tokenizer.padding_side == \"right\"\r\n tokenized_examples = tokenizer(\r\n examples[\"question\" if pad_on_right else \"context\"],\r\n examples[\"context\" if pad_on_right else \"question\"],\r\n truncation=\"only_second\" if pad_on_right else \"only_first\",\r\n max_length=max_length,\r\n stride=doc_stride,\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\",\r\n )\r\n if use_mixing:\r\n candidate_questions = generate_candidate_questions(examples)\r\n tokenized_candidates = tokenizer(\r\n candidate_questions if pad_on_right else examples[\"context\"],\r\n examples[\"context\"] if pad_on_right else candidate_questions,\r\n truncation=\"only_second\" if pad_on_right else \"only_first\",\r\n max_length=max_length,\r\n stride=doc_stride,\r\n return_overflowing_tokens=True,\r\n return_offsets_mapping=True,\r\n padding=\"max_length\",\r\n )\r\n\r\n sample_mapping = tokenized_examples.pop(\"overflow_to_sample_mapping\")\r\n\r\n tokenized_examples[\"example_id\"] = []\r\n\r\n if use_mixing:\r\n tokenized_examples[\"candidate_input_ids\"] = tokenized_candidates[\"input_ids\"]\r\n tokenized_examples[\"candidate_attention_mask\"] = tokenized_candidates[\"attention_mask\"]\r\n tokenized_examples[\"candidate_token_type_ids\"] = tokenized_candidates[\"token_type_ids\"]\r\n\r\n for i in range(len(tokenized_examples[\"input_ids\"])):\r\n sequence_ids = tokenized_examples.sequence_ids(i)\r\n context_index = 1 if pad_on_right else 0\r\n\r\n sample_index = sample_mapping[i]\r\n tokenized_examples[\"example_id\"].append(examples[\"id\"][sample_index])\r\n\r\n tokenized_examples[\"offset_mapping\"][i] = [\r\n (o if sequence_ids[k] == context_index else None)\r\n for k, o in enumerate(tokenized_examples[\"offset_mapping\"][i])\r\n ]\r\n\r\n return tokenized_examples\r\n\r\n\r\n\r\nvalidation_features = datasets[\"validation\"].map(\r\n lambda xs: prepare_validation_features(xs, True),\r\n batched=True,\r\n remove_columns=datasets[\"validation\"].column_names\r\n)\r\n```\r\n\r\nI guess this might happen because of the batched=True. I see similar issues in this repo related to arrow table length mismatch error, but in their cases, the numbers vary a lot. In my case, this error always happens when the expected length and unexpected length are very close. 
Thanks for the help!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2069\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069","id":833768926,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0NzA5ODYw","number":2069,"title":"Add and fix docstring for NamedSplit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-17T13:19:28Z","updated_at":"2021-03-18T10:27:40Z","closed_at":"2021-03-18T10:27:40Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2069","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2069.patch"},"body":"Add and fix docstring for `NamedSplit`, which was missing.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2068\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2068","id":833602832,"node_id":"MDU6SXNzdWU4MzM2MDI4MzI=","number":2068,"title":"PyTorch not available error on SageMaker GPU docker though it is installed 
","user":{"login":"sivakhno","id":1651457,"node_id":"MDQ6VXNlcjE2NTE0NTc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1651457?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sivakhno","html_url":"https:\/\/github.com\/sivakhno","followers_url":"https:\/\/api.github.com\/users\/sivakhno\/followers","following_url":"https:\/\/api.github.com\/users\/sivakhno\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sivakhno\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sivakhno\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sivakhno\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sivakhno\/orgs","repos_url":"https:\/\/api.github.com\/users\/sivakhno\/repos","events_url":"https:\/\/api.github.com\/users\/sivakhno\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sivakhno\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-17T10:04:27Z","updated_at":"2021-06-14T04:47:30Z","closed_at":"2021-06-14T04:47:30Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I get en error when running data loading using SageMaker SDK\r\n\r\n```\r\n File \"main.py\", line 34, in \r\n run_training()\r\n File \"main.py\", line 25, in run_training\r\n dm.setup('fit')\r\n File \"\/opt\/conda\/lib\/python3.6\/site-packages\/pytorch_lightning\/core\/datamodule.py\", line 92, in wrapped_fn\r\n return fn(*args, **kwargs)\r\n File \"\/opt\/ml\/code\/data_module.py\", line 103, in setup\r\n self.dataset[split].set_format(type=\"torch\", columns=self.columns)\r\n File \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/fingerprint.py\", line 337, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/arrow_dataset.py\", line 995, in set_format\r\n _ = get_formatter(type, **format_kwargs)\r\nFile \"\/opt\/conda\/lib\/python3.6\/site-packages\/datasets\/formatting\/__init__.py\", line 114, in get_formatter\r\n raise _FORMAT_TYPES_ALIASES_UNAVAILABLE[format_type]\r\nValueError: PyTorch needs to be installed to be able to return PyTorch tensors.\r\n```\r\n\r\nwhen trying to execute dataset loading using this notebook https:\/\/github.com\/PyTorchLightning\/pytorch-lightning\/blob\/master\/notebooks\/04-transformers-text-classification.ipynb, specifically lines \r\n\r\n```\r\nself.columns = [c for c in self.dataset[split].column_names if c in self.loader_columns]\r\nself.dataset[split].set_format(type=\"torch\", columns=self.columns)\r\n```\r\n\r\nThe SageMaker docker image used is 763104351884.dkr.ecr.eu-central-1.amazonaws.com\/pytorch-training:1.4.0-gpu-py3 .\r\n\r\nBy running container interactively I have checked that torch loading completes successfully by executing `https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/config.py#L39`. \r\n\r\nAlso as a first line in the data loading module I have \r\n\r\n```\r\nimport os\r\nos.environ[\"USE_TF\"] = \"0\" \r\nos.environ[\"USE_TORCH\"] = \"1\" \r\n````\r\n\r\nBut unfortunately the error stills persists. Any suggestions would be appreciated as I am stack.\r\nMany Thanks! 
\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2067\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2067","id":833559940,"node_id":"MDU6SXNzdWU4MzM1NTk5NDA=","number":2067,"title":"Multiprocessing windows error","user":{"login":"flozi00","id":47894090,"node_id":"MDQ6VXNlcjQ3ODk0MDkw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47894090?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/flozi00","html_url":"https:\/\/github.com\/flozi00","followers_url":"https:\/\/api.github.com\/users\/flozi00\/followers","following_url":"https:\/\/api.github.com\/users\/flozi00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/flozi00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/flozi00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/flozi00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/flozi00\/orgs","repos_url":"https:\/\/api.github.com\/users\/flozi00\/repos","events_url":"https:\/\/api.github.com\/users\/flozi00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/flozi00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-03-17T09:12:28Z","updated_at":"2021-08-04T17:59:08Z","closed_at":"2021-08-04T17:59:08Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"As described here https:\/\/huggingface.co\/blog\/fine-tune-xlsr-wav2vec2\r\n\r\nWhen using the num_proc argument on windows the whole Python environment crashes and hanging in loop.\r\nFor example at the map_to_array part.\r\nAn error occures because the cache file already exists and windows throws and error. 
After this the log crashes into an loop ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2066\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066","id":833480551,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0NDcwMjEz","number":2066,"title":"Fix docstring rendering of Dataset\/DatasetDict.from_csv args","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-17T07:23:10Z","updated_at":"2021-03-17T09:21:21Z","closed_at":"2021-03-17T09:21:21Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2066","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2066.patch"},"body":"Fix the docstring rendering of Dataset\/DatasetDict.from_csv args.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2065\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2065","id":833291432,"node_id":"MDU6SXNzdWU4MzMyOTE0MzI=","number":2065,"title":"Only user permission of saved cache files, not 
group","user":{"login":"lorr1","id":57237365,"node_id":"MDQ6VXNlcjU3MjM3MzY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/57237365?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lorr1","html_url":"https:\/\/github.com\/lorr1","followers_url":"https:\/\/api.github.com\/users\/lorr1\/followers","following_url":"https:\/\/api.github.com\/users\/lorr1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lorr1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lorr1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lorr1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lorr1\/orgs","repos_url":"https:\/\/api.github.com\/users\/lorr1\/repos","events_url":"https:\/\/api.github.com\/users\/lorr1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lorr1\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":19,"created_at":"2021-03-17T00:20:22Z","updated_at":"2021-05-10T06:45:29Z","closed_at":"2021-05-10T06:45:29Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello,\r\n\r\nIt seems when a cached file is saved from calling `dataset.map` for preprocessing, it gets the user permissions and none of the user's group permissions. As we share data files across members of our team, this is causing a bit of an issue as we have to continually reset the permission of the files. 
Do you know any ways around this or a way to correctly set the permissions?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2064\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064","id":833002360,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0MDczOTQ1","number":2064,"title":"Fix ted_talks_iwslt version error","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-16T16:43:45Z","updated_at":"2021-03-16T18:00:08Z","closed_at":"2021-03-16T18:00:08Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2064","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2064.patch"},"body":"This PR fixes the bug where the version argument would be passed twice if the dataset configuration was created on the fly.\r\n\r\nFixes #2059 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2063\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063","id":832993705,"node_id":"MDExOlB1bGxSZXF1ZXN0NTk0MDY2NzI5","number":2063,"title":"[Common Voice] Adapt dataset script so that no manual data download is actually 
needed","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-16T16:33:44Z","updated_at":"2021-03-17T09:42:52Z","closed_at":"2021-03-17T09:42:37Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2063","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2063.patch"},"body":"This PR changes the dataset script so that no manual data dir is needed anymore. ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2062\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062","id":832625483,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkzNzUyNTMz","number":2062,"title":"docs: fix missing 
quotation","user":{"login":"neal2018","id":46561493,"node_id":"MDQ6VXNlcjQ2NTYxNDkz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/46561493?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/neal2018","html_url":"https:\/\/github.com\/neal2018","followers_url":"https:\/\/api.github.com\/users\/neal2018\/followers","following_url":"https:\/\/api.github.com\/users\/neal2018\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/neal2018\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/neal2018\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/neal2018\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/neal2018\/orgs","repos_url":"https:\/\/api.github.com\/users\/neal2018\/repos","events_url":"https:\/\/api.github.com\/users\/neal2018\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/neal2018\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-16T10:07:54Z","updated_at":"2021-03-17T09:21:57Z","closed_at":"2021-03-17T09:21:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2062","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2062.patch"},"body":"The json code misses a quote","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2061\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2061","id":832596228,"node_id":"MDU6SXNzdWU4MzI1OTYyMjg=","number":2061,"title":"Cannot load udpos subsets from xtreme dataset using load_dataset()","user":{"login":"adzcodez","id":55791365,"node_id":"MDQ6VXNlcjU1NzkxMzY1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/55791365?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/adzcodez","html_url":"https:\/\/github.com\/adzcodez","followers_url":"https:\/\/api.github.com\/users\/adzcodez\/followers","following_url":"https:\/\/api.github.com\/users\/adzcodez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/adzcodez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/adzcodez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/adzcodez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/adzcodez\/orgs","repos_url":"https:\/\/api.github.com\/users\/adzcodez\/repos","events_url":"https:\/\/api.github.com\/users\/adzcodez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/adzcodez\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892877,"node_id":"MDU6TGFiZWwxOTM1ODkyODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/good%20first%20issue","name":"good first issue","color":"7057ff","default":true,"description":"Good for 
newcomers"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-16T09:32:13Z","updated_at":"2021-06-18T11:54:11Z","closed_at":"2021-06-18T11:54:10Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello, \r\n\r\nI am trying to load the udpos English subset from xtreme dataset, but this faces an error during loading. I am using datasets v1.4.1, pip install. I have tried with other udpos languages which also fail, though loading a different subset altogether (such as XNLI) has no issue. I have also tried on Colab and faced the same error. \r\n\r\nReprex is: \r\n\r\n`from datasets import load_dataset `\r\n`dataset = load_dataset('xtreme', 'udpos.English')`\r\n\r\nThe error is: \r\n`KeyError: '_'`\r\n\r\nThe full traceback is: \r\nKeyError Traceback (most recent call last)\r\n in \r\n 1 from datasets import load_dataset\r\n----> 2 dataset = load_dataset('xtreme', 'udpos.English')\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n 738 \r\n 739 # Download and prepare data\r\n--> 740 builder_instance.download_and_prepare(\r\n 741 download_config=download_config,\r\n 742 download_mode=download_mode,\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\builder.py in download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)\r\n 576 logger.warning(\"HF google storage unreachable. Downloading and preparing it from source\")\r\n 577 if not downloaded_from_gcs:\r\n--> 578 self._download_and_prepare(\r\n 579 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n 580 )\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\builder.py in _download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)\r\n 654 try:\r\n 655 # Prepare split will record examples associated to the split\r\n--> 656 self._prepare_split(split_generator, **prepare_split_kwargs)\r\n 657 except OSError as e:\r\n 658 raise OSError(\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\builder.py in _prepare_split(self, split_generator)\r\n 977 generator, unit=\" examples\", total=split_info.num_examples, leave=False, disable=not_verbose\r\n 978 ):\r\n--> 979 example = self.info.features.encode_example(record)\r\n 980 writer.write(example)\r\n 981 finally:\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_example(self, example)\r\n 946 def encode_example(self, example):\r\n 947 example = cast_to_python_objects(example)\r\n--> 948 return encode_nested_example(self, example)\r\n 949 \r\n 950 def encode_batch(self, batch):\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_nested_example(schema, obj)\r\n 840 # Nested structures: we allow dict, list\/tuples, sequences\r\n 841 if isinstance(schema, dict):\r\n--> 842 return {\r\n 843 k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) in utils.zip_dict(schema, obj)\r\n 844 }\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in (.0)\r\n 841 if isinstance(schema, dict):\r\n 842 return {\r\n--> 843 k: encode_nested_example(sub_schema, sub_obj) for k, (sub_schema, sub_obj) 
in utils.zip_dict(schema, obj)\r\n 844 }\r\n 845 elif isinstance(schema, (list, tuple)):\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_nested_example(schema, obj)\r\n 868 # ClassLabel will convert from string to int, TranslationVariableLanguages does some checks\r\n 869 elif isinstance(schema, (ClassLabel, TranslationVariableLanguages, Value, _ArrayXD)):\r\n--> 870 return schema.encode_example(obj)\r\n 871 # Other object should be directly convertible to a native Arrow type (like Translation and Translation)\r\n 872 return obj\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in encode_example(self, example_data)\r\n 647 # If a string is given, convert to associated integer\r\n 648 if isinstance(example_data, str):\r\n--> 649 example_data = self.str2int(example_data)\r\n 650 \r\n 651 # Allowing -1 to mean no label.\r\n\r\n~\\Anaconda3\\envs\\mlenv\\lib\\site-packages\\datasets\\features.py in str2int(self, values)\r\n 605 if value not in self._str2int:\r\n 606 value = value.strip()\r\n--> 607 output.append(self._str2int[str(value)])\r\n 608 else:\r\n 609 # No names provided, try to integerize\r\n\r\nKeyError: '_'\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2060\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060","id":832588591,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkzNzIxNzcx","number":2060,"title":"Filtering 
refactor","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":9,"created_at":"2021-03-16T09:23:30Z","updated_at":"2021-03-31T09:38:48Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2060","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2060.patch"},"body":"fix https:\/\/github.com\/huggingface\/datasets\/issues\/2032\r\n\r\nbenchmarking is somewhat inconclusive, currently running on 
`book_corpus` with:\r\n\r\n```python\r\n bc = load_dataset(\"bookcorpus\")\r\n now = time.time()\r\n bc.filter(lambda x: len(x[\"text\"]) < 64)\r\n elapsed = time.time() - now\r\n print(elapsed)\r\n```\r\n\r\nthis branch does it in 233 seconds, master in 1409 seconds.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2059\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2059","id":832579156,"node_id":"MDU6SXNzdWU4MzI1NzkxNTY=","number":2059,"title":"Error while following docs to load the `ted_talks_iwslt` dataset","user":{"login":"ekdnam","id":40426312,"node_id":"MDQ6VXNlcjQwNDI2MzEy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/40426312?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ekdnam","html_url":"https:\/\/github.com\/ekdnam","followers_url":"https:\/\/api.github.com\/users\/ekdnam\/followers","following_url":"https:\/\/api.github.com\/users\/ekdnam\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ekdnam\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ekdnam\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ekdnam\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ekdnam\/orgs","repos_url":"https:\/\/api.github.com\/users\/ekdnam\/repos","events_url":"https:\/\/api.github.com\/users\/ekdnam\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ekdnam\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-16T09:12:19Z","updated_at":"2021-03-16T18:00:31Z","closed_at":"2021-03-16T18:00:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am currently trying to load the `ted_talks_iwslt` dataset into google colab.\r\n\r\nThe [docs](https:\/\/huggingface.co\/datasets\/ted_talks_iwslt) mention the following way of doing so.\r\n\r\n```python\r\ndataset = load_dataset(\"ted_talks_iwslt\", language_pair=(\"it\", \"pl\"), year=\"2014\")\r\n```\r\n\r\nExecuting it results in the error attached below.\r\n\r\n```\r\n---------------------------------------------------------------------------\r\nTypeError Traceback (most recent call last)\r\n in ()\r\n----> 1 dataset = load_dataset(\"ted_talks_iwslt\", language_pair=(\"it\", \"pl\"), year=\"2014\")\r\n\r\n4 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/load.py in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, script_version, use_auth_token, **config_kwargs)\r\n 730 hash=hash,\r\n 731 features=features,\r\n--> 732 **config_kwargs,\r\n 733 )\r\n 734 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in __init__(self, 
writer_batch_size, *args, **kwargs)\r\n 927 \r\n 928 def __init__(self, *args, writer_batch_size=None, **kwargs):\r\n--> 929 super(GeneratorBasedBuilder, self).__init__(*args, **kwargs)\r\n 930 # Batch size used by the ArrowWriter\r\n 931 # It defines the number of samples that are kept in memory before writing them\r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in __init__(self, cache_dir, name, hash, features, **config_kwargs)\r\n 241 name,\r\n 242 custom_features=features,\r\n--> 243 **config_kwargs,\r\n 244 )\r\n 245 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/builder.py in _create_builder_config(self, name, custom_features, **config_kwargs)\r\n 337 if \"version\" not in config_kwargs and hasattr(self, \"VERSION\") and self.VERSION:\r\n 338 config_kwargs[\"version\"] = self.VERSION\r\n--> 339 builder_config = self.BUILDER_CONFIG_CLASS(**config_kwargs)\r\n 340 \r\n 341 # otherwise use the config_kwargs to overwrite the attributes\r\n\r\n\/root\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/ted_talks_iwslt\/024d06b1376b361e59245c5878ab8acf9a7576d765f2d0077f61751158e60914\/ted_talks_iwslt.py in __init__(self, language_pair, year, **kwargs)\r\n 219 description=description,\r\n 220 version=datasets.Version(\"1.1.0\", \"\"),\r\n--> 221 **kwargs,\r\n 222 )\r\n 223 \r\n\r\nTypeError: __init__() got multiple values for keyword argument 'version'\r\n```\r\n\r\nHow to resolve this? \r\n\r\nPS: Thanks a lot @huggingface team for creating this great library!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2058\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2058","id":832159844,"node_id":"MDU6SXNzdWU4MzIxNTk4NDQ=","number":2058,"title":"Is it possible to convert a `tfds` to HuggingFace `dataset`?","user":{"login":"abarbosa94","id":6608232,"node_id":"MDQ6VXNlcjY2MDgyMzI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6608232?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/abarbosa94","html_url":"https:\/\/github.com\/abarbosa94","followers_url":"https:\/\/api.github.com\/users\/abarbosa94\/followers","following_url":"https:\/\/api.github.com\/users\/abarbosa94\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/abarbosa94\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/abarbosa94\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/abarbosa94\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/abarbosa94\/orgs","repos_url":"https:\/\/api.github.com\/users\/abarbosa94\/repos","events_url":"https:\/\/api.github.com\/users\/abarbosa94\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/abarbosa94\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-15T20:18:47Z","updated_at":"2021-03-15T20:18:47Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I was having some weird bugs with `C4`dataset version of 
HuggingFace, so I decided to try to download `C4`from `tfds`. I would like to know if it is possible to convert a tfds dataset to HuggingFace dataset format :)\r\n\r\nI can also open a new issue reporting the bug I'm receiving with `datasets.load_dataset('c4','en')` in the future if you think that it would be useful.\r\n\r\nThanks!\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2057\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057","id":832120522,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkzMzMzMjM0","number":2057,"title":"update link to ZEST dataset","user":{"login":"matt-peters","id":619844,"node_id":"MDQ6VXNlcjYxOTg0NA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/619844?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/matt-peters","html_url":"https:\/\/github.com\/matt-peters","followers_url":"https:\/\/api.github.com\/users\/matt-peters\/followers","following_url":"https:\/\/api.github.com\/users\/matt-peters\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/matt-peters\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/matt-peters\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/matt-peters\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/matt-peters\/orgs","repos_url":"https:\/\/api.github.com\/users\/matt-peters\/repos","events_url":"https:\/\/api.github.com\/users\/matt-peters\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/matt-peters\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-15T19:22:57Z","updated_at":"2021-03-16T17:06:28Z","closed_at":"2021-03-16T17:06:28Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2057","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2057.patch"},"body":"Updating the link as the original one is no longer working. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2056\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2056","id":831718397,"node_id":"MDU6SXNzdWU4MzE3MTgzOTc=","number":2056,"title":"issue with opus100\/en-fr dataset ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-15T11:32:42Z","updated_at":"2021-03-16T15:49:00Z","closed_at":"2021-03-16T15:48:59Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI am running run_mlm.py code of huggingface repo with opus100\/fr-en pair, I am getting this error, note that this error occurs for only this pairs and not the other pairs. Any idea why this is occurring? and how I can solve this? 
\r\n\r\nThanks a lot @lhoestq for your help in advance.\r\n\r\n`\r\nthread '' panicked at 'index out of bounds: the len is 617 but the index is 617', \/__w\/tokenizers\/tokenizers\/tokenizers\/src\/tokenizer\/normalizer.rs:382:21\r\nnote: run with `RUST_BACKTRACE=1` environment variable to display a backtrace\r\n 63%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 626\/1000 [00:27<00:16, 22.69ba\/s]\r\n\r\nTraceback (most recent call last):\r\n File \"run_mlm.py\", line 550, in \r\n main()\r\n File \"run_mlm.py\", line 412, in main\r\n in zip(data_args.dataset_name, data_args.dataset_config_name)]\r\n File \"run_mlm.py\", line 411, in \r\n logger) for dataset_name, dataset_config_name\\\r\n File \"\/user\/dara\/dev\/codes\/seq2seq\/data\/tokenize_datasets.py\", line 96, in get_tokenized_dataset\r\n load_from_cache_file=not data_args.overwrite_cache,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 448, in map\r\n for k, dataset in self.items()\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/dataset_dict.py\", line 448, in \r\n for k, dataset in self.items()\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1309, in map\r\n update_data=update_data,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 204, in wrapper\r\n out: Union[\"Dataset\", \"DatasetDict\"] = func(self, *args, **kwargs)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/fingerprint.py\", line 337, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1574, in _map_single\r\n batch, indices, check_same_num_examples=len(self.list_indexes()) > 0, offset=offset\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/arrow_dataset.py\", line 1490, in apply_function_on_filtered_inputs\r\n function(*fn_args, effective_indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n File \"\/user\/dara\/dev\/codes\/seq2seq\/data\/tokenize_datasets.py\", line 89, in tokenize_function\r\n return tokenizer(examples[text_column_name], return_special_tokens_mask=True)\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 2347, in __call__\r\n **kwargs,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_base.py\", line 2532, in batch_encode_plus\r\n **kwargs,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/transformers\/tokenization_utils_fast.py\", line 384, in _batch_encode_plus\r\n is_pretokenized=is_split_into_words,\r\npyo3_runtime.PanicException: index out of bounds: the len is 617 but the index is 617\r\n\r\n`","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2055\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2055","id":831684312,"node_id":"MDU6SXNzdWU4MzE2ODQzMTI=","number":2055,"title":"is there a way to override a dataset object saved with save_to_disk?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-15T10:50:53Z","updated_at":"2021-03-22T04:06:17Z","closed_at":"2021-03-22T04:06:17Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"At the moment when I use save_to_disk, it uses the arbitrary name for the arrow file. Is there a way to override such an object? 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2054\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2054","id":831597665,"node_id":"MDU6SXNzdWU4MzE1OTc2NjU=","number":2054,"title":"Could not find file for ZEST dataset","user":{"login":"bhadreshpsavani","id":26653468,"node_id":"MDQ6VXNlcjI2NjUzNDY4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26653468?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhadreshpsavani","html_url":"https:\/\/github.com\/bhadreshpsavani","followers_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/followers","following_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/repos","events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhadreshpsavani\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-15T09:11:58Z","updated_at":"2021-05-03T09:30:24Z","closed_at":"2021-05-03T09:30:24Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I am trying to use zest dataset from Allen AI using below code in colab,\r\n```\r\n!pip install -q datasets\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"zest\")\r\n```\r\n\r\nI am getting the following error,\r\n```\r\nUsing custom data configuration default\r\n\r\nDownloading and preparing dataset zest\/default (download: 5.53 MiB, generated: 19.96 MiB, post-processed: Unknown size, total: 25.48 MiB) to \/root\/.cache\/huggingface\/datasets\/zest\/default\/0.0.0\/1f7a230fbfc964d979bbca0f0130fbab3259fce547ee758ad8aa4f9c9bec6cca...\r\n---------------------------------------------------------------------------\r\nFileNotFoundError Traceback (most recent call last)\r\n in ()\r\n 1 from datasets import load_dataset\r\n 2 \r\n----> 3 dataset = load_dataset(\"zest\")\r\n\r\n9 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/utils\/file_utils.py in get_from_cache(url, cache_dir, force_download, proxies, etag_timeout, resume_download, user_agent, local_files_only, use_etag, max_retries, use_auth_token)\r\n 612 )\r\n 613 elif response is not None and response.status_code == 404:\r\n--> 614 raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\n 615 _raise_if_offline_mode_is_enabled(f\"Tried to reach {url}\")\r\n 
616 raise ConnectionError(\"Couldn't reach {}\".format(url))\r\n\r\nFileNotFoundError: Couldn't find file at https:\/\/ai2-datasets.s3-us-west-2.amazonaws.com\/zest\/zest.zip\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2053\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053","id":831151728,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyNTM4ODY2","number":2053,"title":"Add bAbI QA tasks","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-14T13:04:39Z","updated_at":"2021-03-29T12:41:48Z","closed_at":"2021-03-29T12:41:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2053","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2053.patch"},"body":"- **Name:** *The (20) QA bAbI tasks*\r\n- **Description:** *The (20) QA bAbI tasks are a set of proxy tasks that evaluate reading comprehension via question answering. Our tasks measure understanding in several ways: whether a system is able to answer questions via chaining facts, simple induction, deduction and many more. The tasks are designed to be prerequisites for any system that aims to be capable of conversing with a human. The aim is to classify these tasks into skill sets,so that researchers can identify (and then rectify) the failings of their systems.*\r\n- **Paper:** [arXiv](https:\/\/arxiv.org\/pdf\/1502.05698.pdf)\r\n- **Data:** [Facebook Research Page](https:\/\/research.fb.com\/downloads\/babi\/)\r\n- **Motivation:** This is a unique dataset with story-based Question Answering. It is a part of the `bAbI` project by Facebook Research.\r\n\r\n**Note**: I have currently added all the 160 configs. If this seems impractical, I can keep only a few. While each `dummy_data.zip` weighs a few KBs, overall it is around 1.3MB for all configurations. This is problematic. 
Let me know what is to be done.\r\n\r\nThanks :)\r\n\r\n\r\n### Checkbox\r\n\r\n- [x] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [x] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [x] Implement `_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [x] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [x] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [x] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [x] Both tests for the real data and the dummy data pass.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2052\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2052","id":831135704,"node_id":"MDU6SXNzdWU4MzExMzU3MDQ=","number":2052,"title":"Timit_asr dataset repeats examples","user":{"login":"fermaat","id":7583522,"node_id":"MDQ6VXNlcjc1ODM1MjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7583522?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/fermaat","html_url":"https:\/\/github.com\/fermaat","followers_url":"https:\/\/api.github.com\/users\/fermaat\/followers","following_url":"https:\/\/api.github.com\/users\/fermaat\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/fermaat\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/fermaat\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/fermaat\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/fermaat\/orgs","repos_url":"https:\/\/api.github.com\/users\/fermaat\/repos","events_url":"https:\/\/api.github.com\/users\/fermaat\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/fermaat\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-14T11:43:43Z","updated_at":"2021-03-15T10:37:16Z","closed_at":"2021-03-15T10:37:16Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Summary\r\n\r\nWhen loading timit_asr dataset on datasets 1.4+, every row in the dataset is the same\r\nSteps to reproduce\r\n\r\nAs an example, on this code there is the text from the training part:\r\n\r\nCode snippet:\r\n```\r\nfrom datasets import load_dataset, load_metric\r\n\r\ntimit = load_dataset(\"timit_asr\")\r\ntimit['train']['text']\r\n#['Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n# 'Would such an act of refusal be useful?',\r\n```\r\nThe same behavior happens for other columns\r\n\r\nExpected behavior:\r\n\r\nDifferent info on 
the actual timit_asr dataset\r\n\r\nActual behavior:\r\n\r\nWhen loading timit_asr dataset on datasets 1.4+, every row in the dataset is the same. I've checked datasets 1.3 and the rows are different\r\nDebug info\r\n\r\n Streamlit version: (get it with $ streamlit version)\r\n Python version: Python 3.6.12\r\n Using Conda? PipEnv? PyEnv? Pex? Using pip\r\n OS version: Centos-release-7-9.2009.1.el7.centos.x86_64\r\n\r\nAdditional information\r\n\r\nYou can check the same behavior on https:\/\/huggingface.co\/datasets\/viewer\/?dataset=timit_asr","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2051\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051","id":831027021,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyNDQ2MDU1","number":2051,"title":"Add MDD Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-14T00:01:05Z","updated_at":"2021-03-19T11:15:44Z","closed_at":"2021-03-19T10:31:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2051","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2051.patch"},"body":"- **Name:** *MDD Dataset*\r\n- **Description:** The Movie Dialog dataset (MDD) is designed to measure how well models can perform at goal and non-goal orientated dialog centered around the topic of movies (question answering, recommendation and discussion), from various movie reviews sources such as MovieLens and OMDb.\r\n- **Paper:** [arXiv](https:\/\/arxiv.org\/pdf\/1511.06931.pdf)\r\n- **Data:** https:\/\/research.fb.com\/downloads\/babi\/\r\n- **Motivation:** This is one of the popular dialog datasets, a part of Facebook Research's \"bAbI project\".\r\n\r\n### Checkbox\r\n\r\n- [x] Create the dataset script `\/datasets\/my_dataset\/my_dataset.py` using the template\r\n- [x] Fill the `_DESCRIPTION` and `_CITATION` variables\r\n- [x] Implement 
`_infos()`, `_split_generators()` and `_generate_examples()`\r\n- [x] Make sure that the `BUILDER_CONFIGS` class attribute is filled with the different configurations of the dataset and that the `BUILDER_CONFIG_CLASS` is specified if there is a custom config class.\r\n- [x] Generate the metadata file `dataset_infos.json` for all configurations\r\n- [x] Generate the dummy data `dummy_data.zip` files to have the dataset script tested and that they don't weigh too much (<50KB)\r\n- [x] Add the dataset card `README.md` using the template : fill the tags and the various paragraphs\r\n- [x] Both tests for the real data and the dummy data pass.\r\n\r\n\r\n**Note**: I haven't included the following from the data files: `entities` (the file containing list of all entities in the first three subtasks), `dictionary`(the dictionary of words they use in their models), `movie_kb`(contains the knowledge base of information about the movies, actors and other entities that are mentioned in the dialogs). Please let me know if those are needed, and if yes, should I make separate configurations for them?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2050\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2050","id":831006551,"node_id":"MDU6SXNzdWU4MzEwMDY1NTE=","number":2050,"title":"Build custom dataset to fine-tune Wav2Vec2","user":{"login":"Omarnabk","id":72882909,"node_id":"MDQ6VXNlcjcyODgyOTA5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/72882909?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Omarnabk","html_url":"https:\/\/github.com\/Omarnabk","followers_url":"https:\/\/api.github.com\/users\/Omarnabk\/followers","following_url":"https:\/\/api.github.com\/users\/Omarnabk\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Omarnabk\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Omarnabk\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Omarnabk\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Omarnabk\/orgs","repos_url":"https:\/\/api.github.com\/users\/Omarnabk\/repos","events_url":"https:\/\/api.github.com\/users\/Omarnabk\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Omarnabk\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-13T22:01:10Z","updated_at":"2021-03-15T09:27:28Z","closed_at":"2021-03-15T09:27:28Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Thank you for your recent tutorial on how to finetune Wav2Vec2 on a custom dataset. The example you gave here (https:\/\/huggingface.co\/blog\/fine-tune-xlsr-wav2vec2) was on the CommonVoice dataset. However, what if I want to load my own dataset? 
I have a manifest (transcript and their audio files) in a JSON file. \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2049\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049","id":830978687,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyNDE2MzQ0","number":2049,"title":"Fix text-classification tags","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-13T19:51:42Z","updated_at":"2021-03-16T15:47:46Z","closed_at":"2021-03-16T15:47:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2049","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2049.patch"},"body":"There are different tags for text classification right now: `text-classification` and `text_classification`:\r\n![image](https:\/\/user-images.githubusercontent.com\/29076344\/111042457-856bdf00-8463-11eb-93c9-50a30106a1a1.png).\r\n\r\nThis PR fixes it.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2048\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2048","id":830953431,"node_id":"MDU6SXNzdWU4MzA5NTM0MzE=","number":2048,"title":"github is not always available - probably need a back 
up","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-13T18:03:32Z","updated_at":"2021-03-13T18:03:32Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Yesterday morning github wasn't working:\r\n\r\n```\r\n:\/tmp$ wget https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.4.1\/metrics\/sacrebleu\/sacrebleu.py--2021-03-12 18:35:59-- https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.4.1\/metrics\/sacrebleu\/sacrebleu.py\r\nResolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.108.133, 185.199.111.133, 185.199.109.133, ...\r\nConnecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.108.133|:443... connected.\r\nHTTP request sent, awaiting response... 500 Internal Server Error\r\n2021-03-12 18:36:11 ERROR 500: Internal Server Error.\r\n```\r\n\r\nSuggestion: have a failover system and replicate the data on another system and reach there if gh isn't reachable? 
perhaps gh can be a master and the replicate a slave - so there is only one true source.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2047\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047","id":830626430,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkyMTI2NzQ3","number":2047,"title":"Multilingual dIalogAct benchMark (miam)","user":{"login":"eusip","id":1551356,"node_id":"MDQ6VXNlcjE1NTEzNTY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1551356?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/eusip","html_url":"https:\/\/github.com\/eusip","followers_url":"https:\/\/api.github.com\/users\/eusip\/followers","following_url":"https:\/\/api.github.com\/users\/eusip\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/eusip\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/eusip\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/eusip\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/eusip\/orgs","repos_url":"https:\/\/api.github.com\/users\/eusip\/repos","events_url":"https:\/\/api.github.com\/users\/eusip\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/eusip\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-12T23:02:55Z","updated_at":"2021-03-23T10:36:34Z","closed_at":"2021-03-19T10:47:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2047","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2047.patch"},"body":"My collaborators (@EmileChapuis, @PierreColombo) and I within the Affective Computing team at Telecom Paris would like to anonymously publish the miam dataset. It is assocated with a publication currently under review. 
We will update the dataset with full citations once the review period is over.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2046\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2046","id":830423033,"node_id":"MDU6SXNzdWU4MzA0MjMwMzM=","number":2046,"title":"add_faisis_index gets very slow when doing it interatively ","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-03-12T20:27:18Z","updated_at":"2021-03-24T22:29:11Z","closed_at":"2021-03-24T22:29:11Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"As the below code suggests, I want to run add_faisis_index in every nth interaction from the training loop. I have 7.2 million documents. Usually, it takes 2.5 hours (if I run an as a separate process similar to the script given in rag\/use_own_knowleldge_dataset.py). Now, this takes usually 5hrs. Is this normal? Any way to make this process faster? 
\r\n\r\n@lhoestq \r\n\r\n```\r\n def training_step(self, batch, batch_idx) -> Dict:\r\n\r\n \r\n if (not batch_idx==0) and (batch_idx%5==0):\r\n\r\n print(\"******************************************************\")\r\n ctx_encoder=self.trainer.model.module.module.model.rag.ctx_encoder\r\n model_copy =type(ctx_encoder)(self.config_dpr) # get a new instance #this will be load in the CPU\r\n model_copy.load_state_dict(ctx_encoder.state_dict()) # copy weights and stuff\r\n\r\n\r\n list_of_gpus = ['cuda:2','cuda:3']\r\n c_dir='\/custom\/cache\/dir'\r\n\r\n kb_dataset = load_dataset(\"csv\", data_files=[self.custom_config.csv_path], split=\"train\", delimiter=\"\\t\", column_names=[\"title\", \"text\"],cache_dir=c_dir) \r\n\r\n print(kb_dataset)\r\n\r\n \r\n n=len(list_of_gpus) #nunber of dedicated GPUs\r\n kb_list=[kb_dataset.shard(n, i, contiguous=True) for i in range(n)]\r\n\r\n #kb_dataset.save_to_disk('\/hpc\/gsir059\/MY-Test\/RAY\/transformers\/examples\/research_projects\/rag\/haha-dir')\r\n\r\n\r\n print(self.trainer.global_rank)\r\n dataset_shards = self.re_encode_kb(model_copy.to(device=list_of_gpus[self.trainer.global_rank]),kb_list[self.trainer.global_rank])\r\n output = [None for _ in list_of_gpus]\r\n\r\n #self.trainer.accelerator_connector.accelerator.barrier(\"embedding_process\")\r\n dist.all_gather_object(output, dataset_shards)\r\n \r\n\r\n #This creation and re-initlaization of the new index\r\n if (self.trainer.global_rank==0): #saving will be done in the main process \r\n \r\n combined_dataset = concatenate_datasets(output)\r\n \r\n passages_path =self.config.passages_path\r\n\r\n logger.info(\"saving the dataset with \")\r\n #combined_dataset.save_to_disk('\/hpc\/gsir059\/MY-Test\/RAY\/transformers\/examples\/research_projects\/rag\/MY-Passage')\r\n combined_dataset.save_to_disk(passages_path)\r\n logger.info(\"Add faiss index to the dataset that consist of embeddings\") \r\n\r\n \r\n embedding_dataset=combined_dataset\r\n index = faiss.IndexHNSWFlat(768, 128, faiss.METRIC_INNER_PRODUCT)\r\n embedding_dataset.add_faiss_index(\"embeddings\", custom_index=index)\r\n\r\n embedding_dataset.get_index(\"embeddings\").save(self.config.index_path)\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2045\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045","id":830351527,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxODc2Mjcz","number":2045,"title":"Preserve column ordering in 
Dataset.rename_column","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T18:26:47Z","updated_at":"2021-03-16T14:48:05Z","closed_at":"2021-03-16T14:35:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2045","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2045.patch"},"body":"Currently `Dataset.rename_column` doesn't necessarily preserve the order of the columns:\r\n```python\r\n>>> from datasets import Dataset\r\n>>> d = Dataset.from_dict({'sentences': [\"s1\", \"s2\"], 'label': [0, 1]})\r\n>>> d\r\nDataset({\r\n features: ['sentences', 'label'],\r\n num_rows: 2\r\n})\r\n>>> d.rename_column('sentences', 'text')\r\nDataset({\r\n features: ['label', 'text'],\r\n num_rows: 2\r\n})\r\n```\r\nThis PR fixes this.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2044\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044","id":830339905,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxODY2NzM1","number":2044,"title":"Add CBT 
dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T18:04:19Z","updated_at":"2021-03-19T11:10:13Z","closed_at":"2021-03-19T10:29:15Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2044","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2044.patch"},"body":"This PR adds the [CBT Dataset](https:\/\/arxiv.org\/abs\/1511.02301).\r\n\r\nNote that I have also added the `raw` dataset as a separate configuration. I couldn't find a suitable \"task\" for it in YAML tags.\r\n\r\nThe dummy files have one example each, as the examples are slightly big. 
For `raw` dataset, I just used top few lines, because they are entire books and would take up a lot of space.\r\n\r\nLet me know in case of any issues.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2043\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043","id":830279098,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxODE1ODAz","number":2043,"title":"Support pickle protocol for dataset splits defined as ReadInstruction","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T16:35:11Z","updated_at":"2021-03-16T14:25:38Z","closed_at":"2021-03-16T14:05:05Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2043","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2043.patch"},"body":"Fixes #2022 (+ some style fixes) ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2042\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042","id":830190276,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNzQwNzQ3","number":2042,"title":"Fix arrow memory checks issue in 
tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-12T14:49:52Z","updated_at":"2021-03-12T15:04:23Z","closed_at":"2021-03-12T15:04:22Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2042","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2042.patch"},"body":"The tests currently fail on `master` because the arrow memory verification doesn't return the expected memory evolution when loading an arrow table in memory.\r\nFrom my experiments, the tests fail only when the full test suite is ran.\r\nThis made me think that maybe some arrow objects from other tests were not freeing their memory until they do and cause the memory verifications to fail in other tests.\r\n\r\nCollecting the garbage collector before checking the arrow memory usage seems to fix this issue.\r\nI added a context manager `assert_arrow_memory_increases` that we can use in tests and that deals with the gc.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2041\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041","id":830180803,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNzMyNzMw","number":2041,"title":"Doc2dial update data_infos and 
data_loaders","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-12T14:39:29Z","updated_at":"2021-03-16T11:09:20Z","closed_at":"2021-03-16T11:09:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2041","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2041.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2040\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2040","id":830169387,"node_id":"MDU6SXNzdWU4MzAxNjkzODc=","number":2040,"title":"ValueError: datasets' indices [1] come from memory and datasets' indices [0] come from 
disk","user":{"login":"simonschoe","id":53626067,"node_id":"MDQ6VXNlcjUzNjI2MDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53626067?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/simonschoe","html_url":"https:\/\/github.com\/simonschoe","followers_url":"https:\/\/api.github.com\/users\/simonschoe\/followers","following_url":"https:\/\/api.github.com\/users\/simonschoe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/simonschoe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/simonschoe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/simonschoe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/simonschoe\/orgs","repos_url":"https:\/\/api.github.com\/users\/simonschoe\/repos","events_url":"https:\/\/api.github.com\/users\/simonschoe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/simonschoe\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-12T14:27:00Z","updated_at":"2021-08-04T18:00:43Z","closed_at":"2021-08-04T18:00:43Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi there,\r\n\r\nI am trying to concat two datasets that I've previously saved to disk via `save_to_disk()` like so (note that both are saved as `DataDict`, `PATH_DATA_CLS_*` are `Path`-objects):\r\n```python\r\nconcatenate_datasets([load_from_disk(PATH_DATA_CLS_A)['train'], load_from_disk(PATH_DATA_CLS_B)['train']])\r\n```\r\nYielding the following error:\r\n```python\r\nValueError: Datasets' indices should ALL come from memory, or should ALL come from disk.\r\nHowever datasets' indices [1] come from memory and datasets' indices [0] come from disk.\r\n```\r\nBeen trying to solve this for quite some time now. Both `DataDict` have been created by reading in a `csv` via `load_dataset` and subsequently processed using the various `datasets` methods (i.e. filter, map, remove col, rename col). 
Can't figure out tho...\r\n\r\n`load_from_disk(PATH_DATA_CLS_A)['train']` yields:\r\n```python\r\nDataset({\r\n features: ['labels', 'text'],\r\n num_rows: 785\r\n})\r\n```\r\n`load_from_disk(PATH_DATA_CLS_B)['train']` yields:\r\n```python\r\nDataset({\r\n features: ['labels', 'text'],\r\n num_rows: 3341\r\n})\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2039\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039","id":830047652,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNjE3ODY3","number":2039,"title":"Doc2dial rc","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-12T11:56:28Z","updated_at":"2021-03-12T15:32:36Z","closed_at":"2021-03-12T15:32:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2039","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2039.patch"},"body":"Added fix to handle the last turn that is a user turn.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2038\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2038","id":830036875,"node_id":"MDU6SXNzdWU4MzAwMzY4NzU=","number":2038,"title":"outdated dataset_infos.json might fail 
verifications","user":{"login":"songfeng","id":2062185,"node_id":"MDQ6VXNlcjIwNjIxODU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2062185?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/songfeng","html_url":"https:\/\/github.com\/songfeng","followers_url":"https:\/\/api.github.com\/users\/songfeng\/followers","following_url":"https:\/\/api.github.com\/users\/songfeng\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/songfeng\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/songfeng\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/songfeng\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/songfeng\/orgs","repos_url":"https:\/\/api.github.com\/users\/songfeng\/repos","events_url":"https:\/\/api.github.com\/users\/songfeng\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/songfeng\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-12T11:41:54Z","updated_at":"2021-03-16T16:27:40Z","closed_at":"2021-03-16T16:27:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"The [doc2dial\/dataset_infos.json](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/doc2dial\/dataset_infos.json) is outdated. It would fail data_loader when verifying download checksum etc..\r\n\r\nCould you please update this file or point me how to update this file?\r\n\r\nThank you.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2037\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037","id":829919685,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxNTA4MTQz","number":2037,"title":"Fix: Wikipedia - save memory by replacing root.clear with 
elem.clear","user":{"login":"miyamonz","id":6331508,"node_id":"MDQ6VXNlcjYzMzE1MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6331508?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/miyamonz","html_url":"https:\/\/github.com\/miyamonz","followers_url":"https:\/\/api.github.com\/users\/miyamonz\/followers","following_url":"https:\/\/api.github.com\/users\/miyamonz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/miyamonz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/miyamonz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/miyamonz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/miyamonz\/orgs","repos_url":"https:\/\/api.github.com\/users\/miyamonz\/repos","events_url":"https:\/\/api.github.com\/users\/miyamonz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/miyamonz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-12T09:22:00Z","updated_at":"2021-03-23T06:08:16Z","closed_at":"2021-03-16T11:01:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2037","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2037.patch"},"body":"see: https:\/\/github.com\/huggingface\/datasets\/issues\/2031\r\n\r\nWhat I did:\r\n- replace root.clear with elem.clear\r\n- remove lines to get root element\r\n- $ make style\r\n- $ make test\r\n - some tests required some pip packages, I installed them.\r\n\r\ntest results on origin\/master and my branch are same. 
I think it's not related on my modification, isn't it?\r\n```\r\n==================================================================================== short test summary info ====================================================================================\r\nFAILED tests\/test_arrow_writer.py::TypedSequenceTest::test_catch_overflow - AssertionError: OverflowError not raised\r\n============================================================= 1 failed, 2332 passed, 5138 skipped, 70 warnings in 91.75s (0:01:31) ==============================================================\r\nmake: *** [Makefile:19: test] Error 1\r\n\r\n```\r\n\r\nIs there anything else I should do?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2036\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2036","id":829909258,"node_id":"MDU6SXNzdWU4Mjk5MDkyNTg=","number":2036,"title":"Cannot load wikitext","user":{"login":"Gpwner","id":19349207,"node_id":"MDQ6VXNlcjE5MzQ5MjA3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19349207?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Gpwner","html_url":"https:\/\/github.com\/Gpwner","followers_url":"https:\/\/api.github.com\/users\/Gpwner\/followers","following_url":"https:\/\/api.github.com\/users\/Gpwner\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Gpwner\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Gpwner\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Gpwner\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Gpwner\/orgs","repos_url":"https:\/\/api.github.com\/users\/Gpwner\/repos","events_url":"https:\/\/api.github.com\/users\/Gpwner\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Gpwner\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-12T09:09:39Z","updated_at":"2021-03-15T08:45:02Z","closed_at":"2021-03-15T08:44:44Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"when I execute these codes\r\n```\r\n>>> from datasets import load_dataset\r\n>>> test_dataset = load_dataset(\"wikitext\")\r\n```\r\n\r\nI got an error,any help?\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 589, in load_dataset\r\n path, script_version=script_version, download_config=download_config, download_mode=download_mode, dataset=True\r\n File \"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 267, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 308, in cached_path\r\n use_etag=download_config.use_etag,\r\n File 
\"\/home\/xxx\/anaconda3\/envs\/transformer\/lib\/python3.7\/site-packages\/datasets\/utils\/file_utils.py\", line 487, in get_from_cache\r\n raise ConnectionError(\"Couldn't reach {}\".format(url))\r\nConnectionError: Couldn't reach https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/1.1.3\/datasets\/wikitext\/wikitext.py\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2035\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2035","id":829475544,"node_id":"MDU6SXNzdWU4Mjk0NzU1NDQ=","number":2035,"title":"wiki40b\/wikipedia for almost all languages cannot be downloaded","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-03-11T19:54:54Z","updated_at":"2021-03-16T14:53:37Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI am trying to download the data as below:\r\n\r\n```\r\nfrom datasets import load_dataset\r\ndataset = load_dataset(\"wiki40b\", \"cs\")\r\nprint(dataset)\r\n```\r\n\r\nI am getting this error. @lhoestq I will be grateful if you could assist me with this error. For almost all languages except english I am getting this error.\r\n\r\nI really need majority of languages in this dataset to be able to train my models for a deadline and your great scalable super well-written library is my only hope to train the models at scale while being low on resources. 
\r\n\r\nthank you very much.\r\n\r\n```\r\n(fast) dara@vgne046:\/user\/dara\/dev\/codes\/seq2seq$ python test_data.py\r\nDownloading and preparing dataset wiki40b\/cs (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to temp\/dara\/cache_home_2\/datasets\/wiki40b\/cs\/1.1.0\/063778187363ffb294896eaa010fc254b42b73e31117c71573a953b0b0bf010f...\r\nTraceback (most recent call last):\r\n File \"test_data.py\", line 3, in \r\n dataset = load_dataset(\"wiki40b\", \"cs\")\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 746, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 579, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 1105, in _download_and_prepare\r\n import apache_beam as beam\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/apache_beam-2.28.0-py3.7-linux-x86_64.egg\/apache_beam\/__init__.py\", line 96, in \r\n from apache_beam import io\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/apache_beam-2.28.0-py3.7-linux-x86_64.egg\/apache_beam\/io\/__init__.py\", line 23, in \r\n from apache_beam.io.avroio import *\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/apache_beam-2.28.0-py3.7-linux-x86_64.egg\/apache_beam\/io\/avroio.py\", line 55, in \r\n import avro\r\n File \"\", line 983, in _find_and_load\r\n File \"\", line 967, in _find_and_load_unlocked\r\n File \"\", line 668, in _load_unlocked\r\n File \"\", line 638, in _load_backward_compatible\r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/avro_python3-1.9.2.1-py3.7.egg\/avro\/__init__.py\", line 34, in \r\n File \"\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/avro_python3-1.9.2.1-py3.7.egg\/avro\/__init__.py\", line 30, in LoadResource\r\nNotADirectoryError: [Errno 20] Not a directory: '\/user\/dara\/libs\/anaconda3\/envs\/fast\/lib\/python3.7\/site-packages\/avro_python3-1.9.2.1-py3.7.egg\/avro\/VERSION.txt'\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2034\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034","id":829381388,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkxMDU2MTEw","number":2034,"title":"Fix 
typo","user":{"login":"pcyin","id":3413464,"node_id":"MDQ6VXNlcjM0MTM0NjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3413464?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pcyin","html_url":"https:\/\/github.com\/pcyin","followers_url":"https:\/\/api.github.com\/users\/pcyin\/followers","following_url":"https:\/\/api.github.com\/users\/pcyin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pcyin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pcyin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pcyin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pcyin\/orgs","repos_url":"https:\/\/api.github.com\/users\/pcyin\/repos","events_url":"https:\/\/api.github.com\/users\/pcyin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pcyin\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-11T17:46:13Z","updated_at":"2021-03-11T18:06:25Z","closed_at":"2021-03-11T18:06:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2034","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2034.patch"},"body":"Change `ENV_XDG_CACHE_HOME ` to `XDG_CACHE_HOME `","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2033\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033","id":829295339,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwOTgzMDAy","number":2033,"title":"Raise an error for outdated sacrebleu 
versions","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-11T16:08:00Z","updated_at":"2021-03-11T17:58:12Z","closed_at":"2021-03-11T17:58:12Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2033","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2033.patch"},"body":"The `sacrebleu` metric seem to only work for sacrecleu>=1.4.12\r\n\r\nFor example using sacrebleu==1.2.10, an error is raised (from metric\/sacrebleu\/sacrebleu.py):\r\n```python\r\n def _compute(\r\n self,\r\n predictions,\r\n references,\r\n smooth_method=\"exp\",\r\n smooth_value=None,\r\n force=False,\r\n lowercase=False,\r\n tokenize=scb.DEFAULT_TOKENIZER,\r\n use_effective_order=False,\r\n ):\r\n references_per_prediction = len(references[0])\r\n if any(len(refs) != references_per_prediction for refs in references):\r\n raise ValueError(\"Sacrebleu requires the same number of references for each prediction\")\r\n transformed_references = [[refs[i] for refs in references] for i in range(references_per_prediction)]\r\n> output = scb.corpus_bleu(\r\n sys_stream=predictions,\r\n ref_streams=transformed_references,\r\n smooth_method=smooth_method,\r\n smooth_value=smooth_value,\r\n force=force,\r\n lowercase=lowercase,\r\n tokenize=tokenize,\r\n use_effective_order=use_effective_order,\r\n )\r\n\r\nE TypeError: corpus_bleu() got an unexpected keyword argument 'smooth_method'\r\n\/mnt\/cache\/modules\/datasets_modules\/metrics\/sacrebleu\/b390045b3d1dd4abf6a95c4a2a11ee3bcc2b7620b076204d0ddc353fa649fd86\/sacrebleu.py:114: TypeError\r\n```\r\n\r\nI improved the error message when users have an outdated version of sacrebleu.\r\nThe new error message tells the user to update sacrebleu.\r\ncc @LysandreJik ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2032\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2032","id":829250912,"node_id":"MDU6SXNzdWU4MjkyNTA5MTI=","number":2032,"title":"Use Arrow filtering instead of writing a new arrow file for Dataset.filter","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"open","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":0,"created_at":"2021-03-11T15:18:50Z","updated_at":"2021-03-11T17:20:57Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"Currently the filter method reads the dataset batch by batch to write a new, filtered, arrow file on disk. 
Therefore all the reading + writing can take some time.\r\n\r\nUsing a mask directly on the arrow table doesn't do any read or write operation therefore it's significantly quicker.\r\n\r\nI think there are two cases:\r\n- if the dataset doesn't have an indices mapping, then one can simply use the arrow filtering on the main arrow table `dataset._data.filter(...)`\r\n- if the dataset an indices mapping, then the mask should be applied on the indices mapping table `dataset._indices.filter(...)`\r\n\r\nThe indices mapping is used to map between the idx at `dataset[idx]` in `__getitem__` and the idx in the actual arrow table.\r\n\r\nThe new filter method should therefore be faster, and allow users to pass either a filtering function (that returns a boolean given an example), or directly a mask.\r\n\r\nFeel free to discuss this idea in this thread :)\r\n\r\nOne additional note: the refactor at #2025 would make all the pickle-related stuff work directly with the arrow filtering, so that we only need to change the Dataset.filter method without having to deal with pickle.\r\n\r\ncc @theo-m @gchhablani \r\n\r\nrelated issues: #1796 #1949 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2031\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2031","id":829122778,"node_id":"MDU6SXNzdWU4MjkxMjI3Nzg=","number":2031,"title":"wikipedia.py generator that extracts XML doesn't release memory","user":{"login":"miyamonz","id":6331508,"node_id":"MDQ6VXNlcjYzMzE1MDg=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6331508?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/miyamonz","html_url":"https:\/\/github.com\/miyamonz","followers_url":"https:\/\/api.github.com\/users\/miyamonz\/followers","following_url":"https:\/\/api.github.com\/users\/miyamonz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/miyamonz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/miyamonz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/miyamonz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/miyamonz\/orgs","repos_url":"https:\/\/api.github.com\/users\/miyamonz\/repos","events_url":"https:\/\/api.github.com\/users\/miyamonz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/miyamonz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-11T12:51:24Z","updated_at":"2021-03-22T08:33:52Z","closed_at":"2021-03-22T08:33:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I tried downloading Japanese wikipedia, but it always failed because of out of memory maybe.\r\n\r\nI found that the generator function that extracts XML data in wikipedia.py doesn't release memory in the loop.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/13a5b7db992ad5cf77895e4c0f76595314390418\/datasets\/wikipedia\/wikipedia.py#L464-L502\r\n\r\n`root.clear()` intend to clear memory, but it 
doesn't.\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/13a5b7db992ad5cf77895e4c0f76595314390418\/datasets\/wikipedia\/wikipedia.py#L490\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/13a5b7db992ad5cf77895e4c0f76595314390418\/datasets\/wikipedia\/wikipedia.py#L494\r\nI replaced them with `elem.clear()`, then it seems to work correctly.\r\n\r\nhere is the notebook to reproduce it.\r\nhttps:\/\/gist.github.com\/miyamonz\/dc06117302b6e85fa51cbf46dde6bb51#file-xtract_content-ipynb","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2030\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030","id":829110803,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwODI4NzQ4","number":2030,"title":"Implement Dataset from text","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-11T12:34:50Z","updated_at":"2021-03-18T13:29:29Z","closed_at":"2021-03-18T13:29:29Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2030","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2030.patch"},"body":"Implement `Dataset.from_text`.\r\n\r\nAnalogue to #1943, #1946.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2029\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2029","id":829097290,"node_id":"MDU6SXNzdWU4MjkwOTcyOTA=","number":2029,"title":"Loading a faiss index 
KeyError","user":{"login":"nbroad1881","id":24982805,"node_id":"MDQ6VXNlcjI0OTgyODA1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/24982805?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nbroad1881","html_url":"https:\/\/github.com\/nbroad1881","followers_url":"https:\/\/api.github.com\/users\/nbroad1881\/followers","following_url":"https:\/\/api.github.com\/users\/nbroad1881\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nbroad1881\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nbroad1881\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nbroad1881\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nbroad1881\/orgs","repos_url":"https:\/\/api.github.com\/users\/nbroad1881\/repos","events_url":"https:\/\/api.github.com\/users\/nbroad1881\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nbroad1881\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to documentation"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-11T12:16:13Z","updated_at":"2021-03-12T00:21:09Z","closed_at":"2021-03-12T00:21:09Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I've recently been testing out RAG and DPR embeddings, and I've run into an issue that is not apparent in the documentation.\r\n\r\nThe basic steps are:\r\n\r\n1. Create a dataset (dataset1)\r\n2. Create an embeddings column using DPR\r\n3. Add a faiss index to the dataset\r\n4. Save faiss index to a file\r\n5. Create a new dataset (dataset2) with the same text and label information as dataset1\r\n6. Try to load the faiss index from file to dataset2\r\n7. Get `KeyError: \"Column embeddings not in the dataset\"`\r\n\r\nI've made a colab notebook that should show exactly what I did. 
Please switch to GPU runtime; I didn't check on CPU.\r\n\r\nhttps:\/\/colab.research.google.com\/drive\/1X0S9ZuZ8k0ybcoei4w7so6dS_WrABmIx?usp=sharing\r\n\r\nUbuntu Version\r\nVERSION=\"18.04.5 LTS (Bionic Beaver)\"\r\n\r\ndatasets==1.4.1\r\nfaiss==1.5.3\r\nfaiss-gpu==1.7.0\r\ntorch==1.8.0+cu101\r\ntransformers==4.3.3\r\n\r\nNVIDIA-SMI 460.56\r\nDriver Version: 460.32.03\r\nCUDA Version: 11.2 \r\nTesla K80 \r\n\r\nI was basically following the steps here: https:\/\/huggingface.co\/docs\/datasets\/faiss_and_ea.html#adding-a-faiss-index\r\n\r\nI included the exact code from the documentation at the end of the notebook to show that they don't work either.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2028\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028","id":828721393,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwNDk1NzEx","number":2028,"title":"Adding PersiNLU reading-comprehension","user":{"login":"danyaljj","id":2441454,"node_id":"MDQ6VXNlcjI0NDE0NTQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2441454?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/danyaljj","html_url":"https:\/\/github.com\/danyaljj","followers_url":"https:\/\/api.github.com\/users\/danyaljj\/followers","following_url":"https:\/\/api.github.com\/users\/danyaljj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/danyaljj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/danyaljj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/danyaljj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/danyaljj\/orgs","repos_url":"https:\/\/api.github.com\/users\/danyaljj\/repos","events_url":"https:\/\/api.github.com\/users\/danyaljj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/danyaljj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-11T04:41:13Z","updated_at":"2021-03-15T09:39:57Z","closed_at":"2021-03-15T09:39:57Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2028","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2028.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2027\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027","id":828490444,"node_id":"MDExOlB1bGxSZXF1ZXN0NTkwMjkzNDA1","number":2027,"title":"Update format columns in 
Dataset.rename_columns","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-10T23:50:59Z","updated_at":"2021-03-11T14:38:40Z","closed_at":"2021-03-11T14:38:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2027","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2027.patch"},"body":"Fixes #2026 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2026\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2026","id":828194467,"node_id":"MDU6SXNzdWU4MjgxOTQ0Njc=","number":2026,"title":"KeyError on using map after renaming a 
column","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-10T18:54:17Z","updated_at":"2021-03-11T14:39:34Z","closed_at":"2021-03-11T14:38:40Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nI'm trying to use `cifar10` dataset. I want to rename the `img` feature to `image` in order to make it consistent with `mnist`, which I'm also planning to use. By doing this, I was trying to avoid modifying `prepare_train_features` function.\r\n\r\nHere is what I try:\r\n\r\n```python\r\ntransform = Compose([ToPILImage(),ToTensor(),Normalize([0.0,0.0,0.0],[1.0,1.0,1.0])])\r\ndef prepare_features(examples):\r\n images = []\r\n labels = []\r\n print(examples)\r\n for example_idx, example in enumerate(examples[\"image\"]):\r\n if transform is not None:\r\n images.append(transform(examples[\"image\"][example_idx].permute(2,0,1)))\r\n else:\r\n images.append(examples[\"image\"][example_idx].permute(2,0,1))\r\n labels.append(examples[\"label\"][example_idx])\r\n output = {\"label\":labels, \"image\":images}\r\n return output\r\n\r\nraw_dataset = load_dataset('cifar10')\r\nraw_dataset.set_format('torch',columns=['img','label'])\r\nraw_dataset = raw_dataset.rename_column('img','image')\r\n\r\nfeatures = datasets.Features({\r\n \"image\": datasets.Array3D(shape=(3,32,32),dtype=\"float32\"),\r\n \"label\": datasets.features.ClassLabel(names=[\r\n \"airplane\",\r\n \"automobile\",\r\n \"bird\",\r\n \"cat\",\r\n \"deer\",\r\n \"dog\",\r\n \"frog\",\r\n \"horse\",\r\n \"ship\",\r\n \"truck\",\r\n ]),\r\n })\r\ntrain_dataset = raw_dataset.map(prepare_features, features = features,batched=True, batch_size=10000)\r\n```\r\nThe error:\r\n```python\r\n---------------------------------------------------------------------------\r\nKeyError Traceback (most recent call last)\r\n in ()\r\n 14 ]),\r\n 15 })\r\n---> 16 train_dataset = raw_dataset.map(prepare_features, features = features,batched=True, batch_size=10000)\r\n\r\n2 frames\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in map(self, function, with_indices, input_columns, batched, batch_size, drop_last_batch, remove_columns, keep_in_memory, load_from_cache_file, cache_file_name, writer_batch_size, features, disable_nullable, fn_kwargs, num_proc, suffix_template, new_fingerprint)\r\n 1287 test_inputs = self[:2] if batched else self[0]\r\n 1288 test_indices = [0, 1] if batched else 0\r\n-> 1289 update_data = 
does_function_return_dict(test_inputs, test_indices)\r\n 1290 logger.info(\"Testing finished, running the mapping function on the dataset\")\r\n 1291 \r\n\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/arrow_dataset.py in does_function_return_dict(inputs, indices)\r\n 1258 fn_args = [inputs] if input_columns is None else [inputs[col] for col in input_columns]\r\n 1259 processed_inputs = (\r\n-> 1260 function(*fn_args, indices, **fn_kwargs) if with_indices else function(*fn_args, **fn_kwargs)\r\n 1261 )\r\n 1262 does_return_dict = isinstance(processed_inputs, Mapping)\r\n\r\n in prepare_features(examples)\r\n 3 labels = []\r\n 4 print(examples)\r\n----> 5 for example_idx, example in enumerate(examples[\"image\"]):\r\n 6 if transform is not None:\r\n 7 images.append(transform(examples[\"image\"][example_idx].permute(2,0,1)))\r\n\r\nKeyError: 'image'\r\n```\r\n\r\nThe print statement inside returns this:\r\n```python\r\n{'label': tensor([6, 9])}\r\n```\r\nApparently, both `img` and `image` do not exist after renaming. \r\n\r\nNote that this code works fine with `img` everywhere.\r\n\r\nNotebook: https:\/\/colab.research.google.com\/drive\/1SzESAlz3BnVYrgQeJ838vbMp1OsukiA2?usp=sharing\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2025\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025","id":828047476,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg5ODk2NjMz","number":2025,"title":"[Refactor] Use in-memory\/memory-mapped\/concatenation tables in Dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":16,"created_at":"2021-03-10T17:00:47Z","updated_at":"2021-03-30T14:46:53Z","closed_at":"2021-03-26T16:51:59Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2025","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2025.patch"},"body":"## Intro\r\n\r\nCurrently there is one assumption that we need to change: a 
dataset is either fully in memory (dataset._data_files is empty), or the dataset can be reloaded from disk with memory mapping (using the dataset._data_files).\r\nThis assumption is used for pickling for example:\r\n- in-memory dataset can just be pickled\/unpickled in-memory\r\n- on-disk dataset can be unloaded to only keep the filepaths when pickling, and then reloaded from the disk when unpickling\r\n\r\n## Issues\r\n\r\nBecause of this assumption, we can't easily implement methods like `Dataset.add_item` to append more rows to a dataset, or `dataset.add_column` to add a column, since we can't mix data from memory and data from the disk.\r\nMoreover, `concatenate_datasets` doesn't work if the datasets to concatenate are not all from memory, or all form the disk.\r\n\r\n## Solution provided in this PR\r\n\r\nI changed this by allowing several types of Table to be used in the Dataset object.\r\nMore specifically I added three pyarrow Table wrappers: InMemoryTable, MemoryMappedTable and ConcatenationTable.\r\nThe in-memory and memory-mapped tables implement the pickling behavior described above.\r\nThe ConcatenationTable can be made from several tables (either in-memory or memory mapped) called \"blocks\". Pickling a ConcatenationTable simply pickles the underlying blocks.\r\n\r\n## Implementation details\r\n\r\nThe three tables classes mentioned above all inherit from a `Table` class defined in `table.py`, which is a wrapper of a pyarrow table. The `Table` wrapper implements all the attributes and methods of the underlying pyarrow table.\r\n\r\nRegarding the MemoryMappedTable:\r\nReloading a pyarrow table from the disk makes you lose all the changes you may have applied (slice, rename_columns, drop, cast etc.). Therefore the MemoryMappedTable implements a \"replay\" mechanism to re-apply the changes when reloading the pyarrow table from the disk.\r\n\r\n## Checklist\r\n\r\n- [x] add InMemoryTable\r\n- [x] add MemoryMappedTable\r\n- [x] add ConcatenationTable\r\n- [x] Update the ArrowReader to use these new tables depending on the `in_memory` parameter\r\n- [x] Update Dataset.from_xxx methods\r\n- [x] Update load_from_disk and save_to_disk\r\n- [x] Backward compatibility of load_from_disk\r\n- [x] Add tests for the new tables\r\n- [x] Update current tests\r\n- [ ] Documentation\r\n\r\n----------\r\n\r\nI would be happy to discuss the design of this PR :)\r\n\r\nClose #1877 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2024\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024","id":827842962,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg5NzEzNDAy","number":2024,"title":"Remove print statement from 
mnist.py","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-10T14:39:58Z","updated_at":"2021-03-11T18:03:52Z","closed_at":"2021-03-11T18:03:51Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2024","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2024.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2023\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023","id":827819608,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg5NjkyNDU2","number":2023,"title":"Add Romanian to 
XQuAD","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-10T14:24:32Z","updated_at":"2021-03-15T10:08:17Z","closed_at":"2021-03-15T10:08:17Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2023","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2023.patch"},"body":"On Jan 18, XQuAD was updated with a new Romanian validation file ([xquad commit link](https:\/\/github.com\/deepmind\/xquad\/commit\/60cac411649156efb6aab9dd4c9cde787a2c0345))\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2022\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2022","id":827435033,"node_id":"MDU6SXNzdWU4Mjc0MzUwMzM=","number":2022,"title":"ValueError when rename_column on splitted 
dataset","user":{"login":"simonschoe","id":53626067,"node_id":"MDQ6VXNlcjUzNjI2MDY3","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/53626067?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/simonschoe","html_url":"https:\/\/github.com\/simonschoe","followers_url":"https:\/\/api.github.com\/users\/simonschoe\/followers","following_url":"https:\/\/api.github.com\/users\/simonschoe\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/simonschoe\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/simonschoe\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/simonschoe\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/simonschoe\/orgs","repos_url":"https:\/\/api.github.com\/users\/simonschoe\/repos","events_url":"https:\/\/api.github.com\/users\/simonschoe\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/simonschoe\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-10T09:40:38Z","updated_at":"2021-03-16T14:06:08Z","closed_at":"2021-03-16T14:05:05Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi there,\r\nI am loading `.tsv` file via `load_dataset` and subsequently split the rows into training and test set via the `ReadInstruction` API like so:\r\n\r\n```python\r\nsplit = {\r\n 'train': ReadInstruction('train', to=90, unit='%'),\r\n 'test': ReadInstruction('train', from_=-10, unit='%')\r\n}\r\n\r\ndataset = load_dataset(\r\n path='csv', # use 'text' loading script to load from local txt-files\r\n delimiter='\\t', # xxx\r\n data_files=text_files, # list of paths to local text files\r\n split=split, # xxx\r\n)\r\n\r\ndataset\r\n```\r\n\r\nPart of output:\r\n```python\r\nDatasetDict({\r\n train: Dataset({\r\n features: ['sentence', 'sentiment'],\r\n num_rows: 900\r\n })\r\n test: Dataset({\r\n features: ['sentence', 'sentiment'],\r\n num_rows: 100\r\n })\r\n})\r\n```\r\nAfterwards I'd like to rename the 'sentence' column to 'text' in order to be compatible with my modelin pipeline. If I run the following code I experience a `ValueError` however:\r\n```python\r\ndataset['train'].rename_column('sentence', 'text')\r\n```\r\n```python\r\n\/usr\/local\/lib\/python3.7\/dist-packages\/datasets\/splits.py in __init__(self, name)\r\n 353 for split_name in split_names_from_instruction:\r\n 354 if not re.match(_split_re, split_name):\r\n--> 355 raise ValueError(f\"Split name should match '{_split_re}'' but got '{split_name}'.\")\r\n 356 \r\n 357 def __str__(self):\r\n\r\nValueError: Split name should match '^\\w+(\\.\\w+)*$'' but got 'ReadInstruction('.\r\n```\r\nIn particular, these behavior does not arise if I use the deprecated `rename_column_` method. Any idea what causes the error? Would assume something in the way I defined the split.\r\n\r\nThanks in advance! 
:)","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2021\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2021","id":826988016,"node_id":"MDU6SXNzdWU4MjY5ODgwMTY=","number":2021,"title":"Interactively doing save_to_disk and load_from_disk corrupts the datasets object?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-10T02:48:34Z","updated_at":"2021-03-13T10:07:41Z","closed_at":"2021-03-13T10:07:41Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":" dataset_info.json file saved after using save_to_disk gets corrupted as follows. \r\n \r\n \r\n![image](https:\/\/user-images.githubusercontent.com\/16892570\/110568474-ed969880-81b7-11eb-832f-2e5129656016.png)\r\n\r\nIs there a way to disable the cache that will save to \/tmp\/huggiface\/datastes ? 
\r\nI have a feeling there is a serious issue with cashing.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2020\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020","id":826961126,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4OTE3MjYx","number":2020,"title":"Remove unnecessary docstart check in conll-like datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-10T02:20:16Z","updated_at":"2021-03-11T13:33:37Z","closed_at":"2021-03-11T13:33:37Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2020","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2020.patch"},"body":"Related to this PR: #1998\r\n\r\nAdditionally, this PR adds the docstart note to the conll2002 dataset card ([link](https:\/\/raw.githubusercontent.com\/teropa\/nlp\/master\/resources\/corpora\/conll2002\/ned.train) to the raw data with `DOCSTART` lines).\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2019\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019","id":826625706,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4NjEyODgy","number":2019,"title":"Replace print with logging in dataset 
scripts","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-09T20:59:34Z","updated_at":"2021-03-12T10:09:01Z","closed_at":"2021-03-11T16:14:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2019","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2019.patch"},"body":"Replaces `print(...)` in the dataset scripts with the library logger.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2018\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018","id":826473764,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4NDc0NTQz","number":2018,"title":"Md gender card 
update","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-09T18:57:20Z","updated_at":"2021-03-12T17:31:00Z","closed_at":"2021-03-12T17:31:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2018","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2018.patch"},"body":"I updated the descriptions of the datasets as they appear in the HF repo and the descriptions of the source datasets according to what I could find from the paper and the references. I'm still a little unclear about some of the fields of the different configs, and there was little info on the word list and name list. 
I'll contact the authors to see if they have any additional information or suggested changes.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2017\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017","id":826428578,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4NDMyNDc2","number":2017,"title":"Add TF-based Features to handle different modes of data","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T18:29:52Z","updated_at":"2021-03-17T12:32:08Z","closed_at":"2021-03-17T12:32:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2017","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2017.patch"},"body":"Hi,\r\n\r\nI am creating this draft PR to work on add features similar to [TF datasets](https:\/\/github.com\/tensorflow\/datasets\/tree\/master\/tensorflow_datasets\/core\/features). I'll be starting with `Tensor` and `FeatureConnector` classes, and build upon them to add other features as well. 
This is a work in progress.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2016\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016","id":825965493,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg4MDA5NjEz","number":2016,"title":"Not all languages have 2 digit codes.","user":{"login":"asiddhant","id":13891775,"node_id":"MDQ6VXNlcjEzODkxNzc1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13891775?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/asiddhant","html_url":"https:\/\/github.com\/asiddhant","followers_url":"https:\/\/api.github.com\/users\/asiddhant\/followers","following_url":"https:\/\/api.github.com\/users\/asiddhant\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/asiddhant\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/asiddhant\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/asiddhant\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/asiddhant\/orgs","repos_url":"https:\/\/api.github.com\/users\/asiddhant\/repos","events_url":"https:\/\/api.github.com\/users\/asiddhant\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/asiddhant\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T13:53:39Z","updated_at":"2021-03-11T18:01:03Z","closed_at":"2021-03-11T18:01:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2016","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2016.patch"},"body":".","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2015\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015","id":825942108,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3OTg4NTQ0","number":2015,"title":"Fix ipython function creation in 
tests","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T13:36:59Z","updated_at":"2021-03-09T14:06:04Z","closed_at":"2021-03-09T14:06:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2015","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2015.patch"},"body":"The test at `tests\/test_caching.py::RecurseDumpTest::test_dump_ipython_function` was failing in python 3.8 because the ipython function was not properly created.\r\n\r\nFix #2010 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2014\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014","id":825916531,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3OTY1NDg3","number":2014,"title":"more explicit method 
parameters","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T13:18:29Z","updated_at":"2021-03-10T10:08:37Z","closed_at":"2021-03-10T10:08:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2014","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2014.patch"},"body":"re: #2009\n\nnot super convinced this is better, and while I usually fight against kwargs here it seems to me that it better conveys the relationship to the `_split_generator` method.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2013\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013","id":825694305,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3NzYzMTgx","number":2013,"title":"Add Cryptonite 
dataset","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T10:32:11Z","updated_at":"2021-03-09T19:27:07Z","closed_at":"2021-03-09T19:27:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2013","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2013.patch"},"body":"cc @aviaefrat who's the original author of the dataset & paper, see https:\/\/github.com\/aviaefrat\/cryptonite","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2012\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2012","id":825634064,"node_id":"MDU6SXNzdWU4MjU2MzQwNjQ=","number":2012,"title":"No upstream branch","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to 
documentation"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-03-09T09:48:55Z","updated_at":"2021-03-09T11:33:31Z","closed_at":"2021-03-09T11:33:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Feels like the documentation on adding a new dataset is outdated?\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/987df6b4e9e20fc0c92bc9df48137d170756fd7b\/ADD_NEW_DATASET.md#L49-L54\r\n\r\nThere is no upstream branch on remote. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2011\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011","id":825621952,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3Njk4MTAx","number":2011,"title":"Add RoSent Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-09T09:40:08Z","updated_at":"2021-03-11T18:00:52Z","closed_at":"2021-03-11T18:00:52Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2011","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2011.patch"},"body":"This PR adds a Romanian sentiment analysis dataset. This PR also closes pending PR #1529.\r\n\r\nI had to add an `original_id` feature because the dataset files have repeated IDs. I can remove them if needed. 
I have also added `id` which is unique.\r\n\r\nLet me know in case of any issues.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2010\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2010","id":825567635,"node_id":"MDU6SXNzdWU4MjU1Njc2MzU=","number":2010,"title":"Local testing fails","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't 
working"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-03-09T09:01:38Z","updated_at":"2021-03-09T14:06:03Z","closed_at":"2021-03-09T14:06:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I'm following the CI setup as described in \r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/8eee4fa9e133fe873a7993ba746d32ca2b687551\/.circleci\/config.yml#L16-L19\r\n\r\nin a new conda environment, at commit https:\/\/github.com\/huggingface\/datasets\/commit\/4de6dbf84e93dad97e1000120d6628c88954e5d4\r\n\r\nand getting\r\n\r\n```\r\nFAILED tests\/test_caching.py::RecurseDumpTest::test_dump_ipython_function - TypeError: an integer is required (got type bytes)\r\n1 failed, 2321 passed, 5109 skipped, 10 warnings in 124.32s (0:02:04)\r\n```\r\n\r\nSeems like a discrepancy with CI, perhaps a lib version that's not controlled? 
\r\nTried with `pyarrow=={1.0.0,0.17.1,2.0.0}`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2009\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2009","id":825541366,"node_id":"MDU6SXNzdWU4MjU1NDEzNjY=","number":2009,"title":"Ambiguous documentation","user":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892861,"node_id":"MDU6TGFiZWwxOTM1ODkyODYx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/documentation","name":"documentation","color":"0075ca","default":true,"description":"Improvements or additions to 
documentation"}],"state":"closed","locked":false,"assignee":{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false},"assignees":[{"login":"theo-m","id":17948980,"node_id":"MDQ6VXNlcjE3OTQ4OTgw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17948980?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/theo-m","html_url":"https:\/\/github.com\/theo-m","followers_url":"https:\/\/api.github.com\/users\/theo-m\/followers","following_url":"https:\/\/api.github.com\/users\/theo-m\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/theo-m\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/theo-m\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/theo-m\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/theo-m\/orgs","repos_url":"https:\/\/api.github.com\/users\/theo-m\/repos","events_url":"https:\/\/api.github.com\/users\/theo-m\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/theo-m\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-03-09T08:42:11Z","updated_at":"2021-03-12T15:01:34Z","closed_at":"2021-03-12T15:01:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"https:\/\/github.com\/huggingface\/datasets\/blob\/2ac9a0d24a091989f869af55f9f6411b37ff5188\/templates\/new_dataset_script.py#L156-L158\r\n\r\nLooking at the template, I find this documentation line to be confusing, the method parameters don't include the `gen_kwargs` so I'm unclear where they're coming from.\r\n\r\nHappy to push a PR with a clearer statement when I understand the meaning.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2008\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008","id":825153804,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg3Mjc1Njk4","number":2008,"title":"Fix various typos\/grammer in the 
docs","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-09T01:39:28Z","updated_at":"2021-03-15T18:42:49Z","closed_at":"2021-03-09T10:21:32Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2008","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2008.patch"},"body":"This PR:\r\n* fixes various typos\/grammer I came across while reading the docs\r\n* adds the \"Install with conda\" installation instructions\r\n\r\nCloses #1959 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2007\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2007","id":824518158,"node_id":"MDU6SXNzdWU4MjQ1MTgxNTg=","number":2007,"title":"How to not load huggingface datasets into memory 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-08T12:35:26Z","updated_at":"2021-08-04T18:02:25Z","closed_at":"2021-08-04T18:02:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI am running this example from transformers library version 4.3.3:\r\n(Here is the full documentation https:\/\/github.com\/huggingface\/transformers\/issues\/8771 but the running command should work out of the box)\r\n\r\n USE_TF=0 deepspeed run_seq2seq.py --model_name_or_path google\/mt5-base --dataset_name wmt16 --dataset_config_name ro-en --source_prefix \"translate English to Romanian: \" --task translation_en_to_ro --output_dir \/test\/test_large --do_train --do_eval --predict_with_generate --max_train_samples 500 --max_val_samples 500 --max_source_length 128 --max_target_length 128 --sortish_sampler --per_device_train_batch_size 8 --val_max_target_length 128 --deepspeed ds_config.json --num_train_epochs 1 --eval_steps 25000 --warmup_steps 500 --overwrite_output_dir\r\n\r\n(Here please find the script: https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/seq2seq\/run_seq2seq.py)\r\n\r\nIf you do not pass max_train_samples in above command to load the full dataset, then I get memory issue on a gpu with 24 GigBytes of memory.\r\n \r\nI need to train large-scale mt5 model on large-scale datasets of wikipedia (multiple of them concatenated or other datasets in multiple languages like OPUS), could you help me how I can avoid loading the full data into memory? to make the scripts not related to data size? 
\r\n\r\nIn above example, I was hoping the script could work without relying on dataset size, so I can still train the model without subsampling training set.\r\n\r\nthank you so much @lhoestq for your great help in advance\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2006\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006","id":824457794,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2Njg5Nzk2","number":2006,"title":"Don't gitignore dvc.lock","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-08T11:13:08Z","updated_at":"2021-03-08T11:28:35Z","closed_at":"2021-03-08T11:28:34Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2006","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2006.patch"},"body":"The benchmarks runs are [failing](https:\/\/github.com\/huggingface\/datasets\/runs\/2055534629?check_suite_focus=true) because of \r\n```\r\nERROR: 'dvc.lock' is git-ignored.\r\n```\r\n\r\nI removed the dvc.lock file from the gitignore to fix that","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2005\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2005","id":824275035,"node_id":"MDU6SXNzdWU4MjQyNzUwMzU=","number":2005,"title":"Setting to torch format not working with torchvision and 
MNIST","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":9,"created_at":"2021-03-08T07:38:11Z","updated_at":"2021-03-09T17:58:13Z","closed_at":"2021-03-09T17:58:13Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi\r\n\r\nI am trying to use `torchvision.transforms` to handle the transformation of the image data in the `mnist` dataset. Assume I have a `transform` variable which contains the `torchvision.transforms` object.\r\n\r\nA snippet of what I am trying to do:\r\n```python\r\ndef prepare_features(examples):\r\n images = []\r\n labels = []\r\n for example_idx, example in enumerate(examples[\"image\"]):\r\n if transform is not None:\r\n images.append(transform(\r\n np.array(examples[\"image\"][example_idx], dtype=np.uint8)\r\n ))\r\n else:\r\n images.append(torch.tensor(np.array(examples[\"image\"][example_idx], dtype=np.uint8)))\r\n labels.append(torch.tensor(examples[\"label\"][example_idx]))\r\n output = {\"label\":labels, \"image\":images}\r\n return output\r\n\r\nraw_dataset = load_dataset('mnist')\r\ntrain_dataset = raw_dataset.map(prepare_features, batched=True, batch_size=10000)\r\ntrain_dataset.set_format(\"torch\",columns=[\"image\",\"label\"])\r\n```\r\n\r\nAfter this, I check the type of the following:\r\n```python\r\nprint(type(train_dataset[\"train\"][\"label\"]))\r\nprint(type(train_dataset[\"train\"][\"image\"][0]))\r\n```\r\nThis leads to the following output:\r\n\r\n```python\r\n\r\n\r\n```\r\nI use `torch.utils.DataLoader` for batches, the type of `batch[\"train\"][\"image\"]` is also ``.\r\n\r\nI don't understand why only the `label` is converted to a torch tensor, why does the image not get converted? How can I fix this issue?\r\n\r\nThanks,\r\nGunjan\r\n\r\nEDIT:\r\nI just checked the shapes, and the types, `batch[image]` is a actually a list of list of tensors. Shape is (1,28,2,28), where `batch_size` is 2. I don't understand why this is happening. Ideally it should be a tensor of shape (2,1,28,28).\r\n\r\nEDIT 2:\r\nInside `prepare_train_features`, the shape of `images[0]` is `torch.Size([1,28,28])`, the conversion is working. 
However, the output of the `map` is a list of list of list of list.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2004\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004","id":824080760,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MzcyODY1","number":2004,"title":"LaRoSeDa","user":{"login":"MihaelaGaman","id":6823177,"node_id":"MDQ6VXNlcjY4MjMxNzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6823177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MihaelaGaman","html_url":"https:\/\/github.com\/MihaelaGaman","followers_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/followers","following_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/orgs","repos_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/repos","events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-08T01:06:32Z","updated_at":"2021-03-17T10:43:20Z","closed_at":"2021-03-17T10:43:20Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2004","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2004.patch"},"body":"Add LaRoSeDa to huggingface datasets.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2003\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2003","id":824034678,"node_id":"MDU6SXNzdWU4MjQwMzQ2Nzg=","number":2003,"title":"Messages are being printed to the 
`stdout`","user":{"login":"mahnerak","id":1367529,"node_id":"MDQ6VXNlcjEzNjc1Mjk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1367529?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mahnerak","html_url":"https:\/\/github.com\/mahnerak","followers_url":"https:\/\/api.github.com\/users\/mahnerak\/followers","following_url":"https:\/\/api.github.com\/users\/mahnerak\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mahnerak\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mahnerak\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mahnerak\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mahnerak\/orgs","repos_url":"https:\/\/api.github.com\/users\/mahnerak\/repos","events_url":"https:\/\/api.github.com\/users\/mahnerak\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mahnerak\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-07T22:09:34Z","updated_at":"2021-03-15T17:47:47Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"In this code segment, we can see some messages are being printed to the `stdout`.\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/7e60bb509b595e8edc60a87f32b2bacfc065d607\/src\/datasets\/builder.py#L545-L554\r\nAccording to the comment, it is done intentionally, but I don't really understand why don't we log it with a higher level or print it directly to the `stderr`.\r\nIn my opinion, this kind of messages should never printed to the stdout. At least some configuration\/flag should make it possible to provide in order to explicitly prevent the package to contaminate the stdout.\r\n","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2002\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002","id":823955744,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MjgwNzE3","number":2002,"title":"MOROCO","user":{"login":"MihaelaGaman","id":6823177,"node_id":"MDQ6VXNlcjY4MjMxNzc=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6823177?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/MihaelaGaman","html_url":"https:\/\/github.com\/MihaelaGaman","followers_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/followers","following_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/orgs","repos_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/repos","events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/MihaelaGaman\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-07T16:22:17Z","updated_at":"2021-03-19T09:52:06Z","closed_at":"2021-03-19T09:52:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/2002","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/2002.patch"},"body":"Add MOROCO to huggingface datasets.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2001\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2001","id":823946706,"node_id":"MDU6SXNzdWU4MjM5NDY3MDY=","number":2001,"title":"Empty evidence document (\"provenance\") in KILT ELI5 
dataset","user":{"login":"donggyukimc","id":16605764,"node_id":"MDQ6VXNlcjE2NjA1NzY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16605764?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/donggyukimc","html_url":"https:\/\/github.com\/donggyukimc","followers_url":"https:\/\/api.github.com\/users\/donggyukimc\/followers","following_url":"https:\/\/api.github.com\/users\/donggyukimc\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/donggyukimc\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/donggyukimc\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/donggyukimc\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/donggyukimc\/orgs","repos_url":"https:\/\/api.github.com\/users\/donggyukimc\/repos","events_url":"https:\/\/api.github.com\/users\/donggyukimc\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/donggyukimc\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-07T15:41:35Z","updated_at":"2021-03-17T05:51:01Z","closed_at":"2021-03-17T05:51:01Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"In the original KILT benchmark(https:\/\/github.com\/facebookresearch\/KILT), \r\n\r\nall samples has its evidence document (i.e. wikipedia page id) for prediction.\r\n\r\nFor example, a sample in ELI5 dataset has the format including provenance (=evidence document) like this\r\n\r\n`{\"id\": \"1kiwfx\", \"input\": \"In Trading Places (1983, Akroyd\/Murphy) how does the scheme at the end of the movie work? Why would buying a lot of OJ at a high price ruin the Duke Brothers?\", \"output\": [{\"answer\": \"I feel so old. People have been askinbg what happened at the end of this movie for what must be the last 15 years of my life. It never stops. Every year\/month\/fortnight, I see someone asking what happened, and someone explaining. Andf it will keep on happening, until I am 90yrs old, in a home, with nothing but the Internet and my bladder to keep me going. And there it will be: \\\"what happens at the end of Trading Places?\\\"\"}, {\"provenance\": [{\"wikipedia_id\": \"242855\", \"title\": \"Futures contract\", \"section\": \"Section::::Abstract.\", \"start_paragraph_id\": 1, \"start_character\": 14, \"end_paragraph_id\": 1, \"end_character\": 612, \"bleu_score\": 0.9232808519770748}]}], \"meta\": {\"partial_evidence\": [{\"wikipedia_id\": \"520990\", \"title\": \"Trading Places\", \"section\": \"Section::::Plot.\\n\", \"start_paragraph_id\": 7, \"end_paragraph_id\": 7, \"meta\": {\"evidence_span\": [\"On television, they learn that Clarence Beeks is transporting a secret USDA report on orange crop forecasts.\", \"On television, they learn that Clarence Beeks is transporting a secret USDA report on orange crop forecasts. 
Winthorpe and Valentine recall large payments made to Beeks by the Dukes and realize that the Dukes plan to obtain the report to corner the market on frozen orange juice.\", \"Winthorpe and Valentine recall large payments made to Beeks by the Dukes and realize that the Dukes plan to obtain the report to corner the market on frozen orange juice.\"]}}]}}`\r\n\r\nHowever, KILT ELI5 dataset from huggingface datasets library only contain empty list of provenance.\r\n\r\n`{'id': '1oy5tc', 'input': 'in football whats the point of wasting the first two plays with a rush - up the middle - not regular rush plays i get those', 'meta': {'left_context': '', 'mention': '', 'obj_surface': [], 'partial_evidence': [], 'right_context': '', 'sub_surface': [], 'subj_aliases': [], 'template_questions': []}, 'output': [{'answer': 'In most cases the O-Line is supposed to make a hole for the running back to go through. If you run too many plays to the outside\/throws the defense will catch on.\\n\\nAlso, 2 5 yard plays gets you a new set of downs.', 'meta': {'score': 2}, 'provenance': []}, {'answer': \"I you don't like those type of plays, watch CFL. We only get 3 downs so you can't afford to waste one. Lots more passing.\", 'meta': {'score': 2}, 'provenance': []}]}\r\n`\r\n\r\nshould i perform other procedure to obtain evidence documents?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/2000\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/2000","id":823899910,"node_id":"MDU6SXNzdWU4MjM4OTk5MTA=","number":2000,"title":"Windows Permission Error (most recent version of datasets)","user":{"login":"itsLuisa","id":73881148,"node_id":"MDQ6VXNlcjczODgxMTQ4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73881148?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/itsLuisa","html_url":"https:\/\/github.com\/itsLuisa","followers_url":"https:\/\/api.github.com\/users\/itsLuisa\/followers","following_url":"https:\/\/api.github.com\/users\/itsLuisa\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/itsLuisa\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/itsLuisa\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/itsLuisa\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/itsLuisa\/orgs","repos_url":"https:\/\/api.github.com\/users\/itsLuisa\/repos","events_url":"https:\/\/api.github.com\/users\/itsLuisa\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/itsLuisa\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-07T11:55:28Z","updated_at":"2021-03-09T12:42:57Z","closed_at":"2021-03-09T12:42:57Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi everyone,\r\nCan anyone help me with why the dataset loading script below raises a Windows Permission Error? 
I stuck quite closely to https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/conll2003\/conll2003.py , only I want to load the data from three local three-column tsv-files (id\\ttokens\\tpos_tags\\n). I am using the most recent version of datasets. Thank you in advance!\r\nLuisa\r\n\r\nMy script:\r\n```\r\nimport datasets\r\nimport csv\r\n\r\nlogger = datasets.logging.get_logger(__name__)\r\n\r\n\r\nclass SampleConfig(datasets.BuilderConfig):\r\n\r\n def __init__(self, **kwargs):\r\n super(SampleConfig, self).__init__(**kwargs)\r\n\r\n\r\nclass Sample(datasets.GeneratorBasedBuilder):\r\n BUILDER_CONFIGS = [\r\n SampleConfig(name=\"conll2003\", version=datasets.Version(\"1.0.0\"), description=\"Conll2003 dataset\"),\r\n ]\r\n\r\n def _info(self):\r\n return datasets.DatasetInfo(\r\n description=\"Dataset with words and their POS-Tags\",\r\n features=datasets.Features(\r\n {\r\n \"id\": datasets.Value(\"string\"),\r\n \"tokens\": datasets.Sequence(datasets.Value(\"string\")),\r\n \"pos_tags\": datasets.Sequence(\r\n datasets.features.ClassLabel(\r\n names=[\r\n \"''\",\r\n \",\",\r\n \"-LRB-\",\r\n \"-RRB-\",\r\n \".\",\r\n \":\",\r\n \"CC\",\r\n \"CD\",\r\n \"DT\",\r\n \"EX\",\r\n \"FW\",\r\n \"HYPH\",\r\n \"IN\",\r\n \"JJ\",\r\n \"JJR\",\r\n \"JJS\",\r\n \"MD\",\r\n \"NN\",\r\n \"NNP\",\r\n \"NNPS\",\r\n \"NNS\",\r\n \"PDT\",\r\n \"POS\",\r\n \"PRP\",\r\n \"PRP$\",\r\n \"RB\",\r\n \"RBR\",\r\n \"RBS\",\r\n \"RP\",\r\n \"TO\",\r\n \"UH\",\r\n \"VB\",\r\n \"VBD\",\r\n \"VBG\",\r\n \"VBN\",\r\n \"VBP\",\r\n \"VBZ\",\r\n \"WDT\",\r\n \"WP\",\r\n \"WRB\",\r\n \"``\"\r\n ]\r\n )\r\n ),\r\n }\r\n ),\r\n supervised_keys=None,\r\n homepage=\"https:\/\/catalog.ldc.upenn.edu\/LDC2011T03\",\r\n citation=\"Weischedel, Ralph, et al. OntoNotes Release 4.0 LDC2011T03. Web Download. 
Philadelphia: Linguistic Data Consortium, 2011.\",\r\n )\r\n\r\n def _split_generators(self, dl_manager):\r\n loaded_files = dl_manager.download_and_extract(self.config.data_files)\r\n return [\r\n datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={\"filepath\": loaded_files[\"train\"]}),\r\n datasets.SplitGenerator(name=datasets.Split.TEST, gen_kwargs={\"filepath\": loaded_files[\"test\"]}),\r\n datasets.SplitGenerator(name=datasets.Split.VALIDATION, gen_kwargs={\"filepath\": loaded_files[\"val\"]})\r\n ]\r\n\r\n def _generate_examples(self, filepath):\r\n logger.info(\"generating examples from = %s\", filepath)\r\n with open(filepath, encoding=\"cp1252\") as f:\r\n data = csv.reader(f, delimiter=\"\\t\")\r\n ids = list()\r\n tokens = list()\r\n pos_tags = list()\r\n for id_, line in enumerate(data):\r\n #print(line)\r\n if len(line) == 1:\r\n if tokens:\r\n yield id_, {\"id\": ids, \"tokens\": tokens, \"pos_tags\": pos_tags}\r\n ids = list()\r\n tokens = list()\r\n pos_tags = list()\r\n else:\r\n ids.append(line[0])\r\n tokens.append(line[1])\r\n pos_tags.append(line[2])\r\n # last example\r\n yield id_, {\"id\": ids, \"tokens\": tokens, \"pos_tags\": pos_tags}\r\n\r\n\r\ndef main():\r\n dataset = datasets.load_dataset(\r\n \"data_loading.py\", data_files={\r\n \"train\": \"train.tsv\",\r\n \"test\": \"test.tsv\",\r\n \"val\": \"val.tsv\"\r\n }\r\n )\r\n\r\n #print(dataset)\r\n\r\nif __name__==\"__main__\":\r\n main()\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1999\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999","id":823753591,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MTM5ODMy","number":1999,"title":"Add FashionMNIST 
dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-06T21:36:57Z","updated_at":"2021-03-09T09:52:11Z","closed_at":"2021-03-09T09:52:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1999","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1999.patch"},"body":"This PR adds [FashionMNIST](https:\/\/github.com\/zalandoresearch\/fashion-mnist) dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1998\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998","id":823723960,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg2MTE4NTQ4","number":1998,"title":"Add -DOCSTART- note to dataset card of conll-like 
datasets","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-06T19:08:29Z","updated_at":"2021-03-11T02:20:07Z","closed_at":"2021-03-11T02:20:07Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1998","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1998.patch"},"body":"Closes #1983","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1997\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1997","id":823679465,"node_id":"MDU6SXNzdWU4MjM2Nzk0NjU=","number":1997,"title":"from datasets import MoleculeDataset, GEOMDataset","user":{"login":"futianfan","id":5087210,"node_id":"MDQ6VXNlcjUwODcyMTA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5087210?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/futianfan","html_url":"https:\/\/github.com\/futianfan","followers_url":"https:\/\/api.github.com\/users\/futianfan\/followers","following_url":"https:\/\/api.github.com\/users\/futianfan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/futianfan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/futianfan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/futianfan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/futianfan\/orgs","repos_url":"https:\/\/api.github.com\/users\/futianfan\/repos","events_url":"https:\/\/api.github.com\/users\/futianfan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/futianfan\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new 
dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-06T15:50:19Z","updated_at":"2021-03-06T16:13:26Z","closed_at":"2021-03-06T16:13:26Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I met the ImportError: cannot import name 'MoleculeDataset' from 'datasets'. Have anyone met the similar issues? Thanks!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1996\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1996","id":823573410,"node_id":"MDU6SXNzdWU4MjM1NzM0MTA=","number":1996,"title":"Error when exploring `arabic_speech_corpus`","user":{"login":"elgeish","id":6879673,"node_id":"MDQ6VXNlcjY4Nzk2NzM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6879673?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/elgeish","html_url":"https:\/\/github.com\/elgeish","followers_url":"https:\/\/api.github.com\/users\/elgeish\/followers","following_url":"https:\/\/api.github.com\/users\/elgeish\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/elgeish\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/elgeish\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/elgeish\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/elgeish\/orgs","repos_url":"https:\/\/api.github.com\/users\/elgeish\/repos","events_url":"https:\/\/api.github.com\/users\/elgeish\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/elgeish\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"},{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""},{"id":2725241052,"node_id":"MDU6TGFiZWwyNzI1MjQxMDUy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/speech","name":"speech","color":"d93f0b","default":false,"description":""}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-06T05:55:20Z","updated_at":"2021-03-09T11:12:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Navigate to https:\/\/huggingface.co\/datasets\/viewer\/?dataset=arabic_speech_corpus\r\n\r\nError:\r\n```\r\nImportError: To be able to use this dataset, you need to install the following dependencies['soundfile'] using 'pip install soundfile' for instance'\r\nTraceback:\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/streamlit\/script_runner.py\", line 332, in _run_script\r\n exec(code, module.__dict__)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 233, in \r\n configs = get_confs(option)\r\nFile 
\"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 604, in wrapped_func\r\n return get_or_create_cached_value()\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/streamlit\/caching.py\", line 588, in get_or_create_cached_value\r\n return_value = func(*args, **kwargs)\r\nFile \"\/home\/sasha\/nlp-viewer\/run.py\", line 145, in get_confs\r\n module_path = nlp.load.prepare_module(path, dataset=True\r\nFile \"\/home\/sasha\/.local\/share\/virtualenvs\/lib-ogGKnCK_\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 342, in prepare_module\r\n f\"To be able to use this {module_type}, you need to install the following dependencies\"\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1995\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995","id":822878431,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg1NDI5NTg0","number":1995,"title":"[Timit_asr] Make sure not only the first sample is used ","user":{"login":"patrickvonplaten","id":23423619,"node_id":"MDQ6VXNlcjIzNDIzNjE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23423619?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patrickvonplaten","html_url":"https:\/\/github.com\/patrickvonplaten","followers_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/followers","following_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/orgs","repos_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/repos","events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patrickvonplaten\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-05T08:42:51Z","updated_at":"2021-06-30T06:25:53Z","closed_at":"2021-03-05T08:58:59Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1995","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1995.patch"},"body":"When playing around with timit I noticed that only the first sample is used for all indices. 
I corrected this typo so that the dataset is correctly loaded.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1994\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1994","id":822871238,"node_id":"MDU6SXNzdWU4MjI4NzEyMzg=","number":1994,"title":"not being able to get wikipedia es language","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-03-05T08:31:48Z","updated_at":"2021-03-11T20:46:21Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI am trying to run a code with wikipedia of config 20200501.es, getting:\r\n\r\nTraceback (most recent call last):\r\n File \"run_mlm_t5.py\", line 608, in \r\n main()\r\n File \"run_mlm_t5.py\", line 359, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name)\r\n File \"\/dara\/libs\/anaconda3\/envs\/success432\/lib\/python3.7\/site-packages\/datasets-1.2.1-py3.7.egg\/datasets\/load.py\", line 612, in load_dataset\r\n ignore_verifications=ignore_verifications,\r\n File \"\/dara\/libs\/anaconda3\/envs\/success432\/lib\/python3.7\/site-packages\/datasets-1.2.1-py3.7.egg\/datasets\/builder.py\", line 527, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/dara\/libs\/anaconda3\/envs\/success432\/lib\/python3.7\/site-packages\/datasets-1.2.1-py3.7.egg\/datasets\/builder.py\", line 1050, in _download_and_prepare\r\n \"\\n\\t`{}`\".format(usage_example)\r\ndatasets.builder.MissingBeamOptions: Trying to generate a dataset using Apache Beam, yet no Beam Runner or PipelineOptions() has been provided in `load_dataset` or in the builder arguments. For big datasets it has to run on large-scale data processing tools like Dataflow, Spark, etc. 
More information about Apache Beam runners at https:\/\/beam.apache.org\/documentation\/runners\/capability-matrix\/\r\nIf you really want to run it locally because you feel like the Dataset is small enough, you can use the local beam runner called `DirectRunner` (you may run out of memory). \r\nExample of usage: \r\n\t`load_dataset('wikipedia', '20200501.es', beam_runner='DirectRunner')`\r\n\r\nthanks @lhoestq for any suggestion\/help ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1993\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1993","id":822758387,"node_id":"MDU6SXNzdWU4MjI3NTgzODc=","number":1993,"title":"How to load a dataset with load_from disk and save it again after doing transformations without changing the original? ","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-05T05:25:50Z","updated_at":"2021-03-22T04:05:50Z","closed_at":"2021-03-22T04:05:50Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am using the latest datasets library. In my work, I first use **load_from_disk** to load a data set that contains 3.8Gb information. Then during my training process, I update that dataset object and add new elements and save it in a different place. \r\n\r\nWhen I save the dataset with **save_to_disk**, the original dataset which is already in the disk also gets updated. I do not want to update it. 
How to prevent from this?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1992\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1992","id":822672238,"node_id":"MDU6SXNzdWU4MjI2NzIyMzg=","number":1992,"title":"`datasets.map` multi processing much slower than single processing ","user":{"login":"hwijeen","id":29157715,"node_id":"MDQ6VXNlcjI5MTU3NzE1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29157715?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hwijeen","html_url":"https:\/\/github.com\/hwijeen","followers_url":"https:\/\/api.github.com\/users\/hwijeen\/followers","following_url":"https:\/\/api.github.com\/users\/hwijeen\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hwijeen\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hwijeen\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hwijeen\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hwijeen\/orgs","repos_url":"https:\/\/api.github.com\/users\/hwijeen\/repos","events_url":"https:\/\/api.github.com\/users\/hwijeen\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hwijeen\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892857,"node_id":"MDU6TGFiZWwxOTM1ODkyODU3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/bug","name":"bug","color":"d73a4a","default":true,"description":"Something isn't working"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-05T02:10:02Z","updated_at":"2021-07-19T10:05:09Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, thank you for the great library.\r\n\r\nI've been using datasets to pretrain language models, and it often involves datasets as large as ~70G.\r\nMy data preparation step is roughly two steps: `load_dataset` which splits corpora into a table of sentences, and `map` converts a sentence into a list of integers, using a tokenizer.\r\n\r\nI noticed that `map` function with `num_proc=mp.cpu_count() \/\/2` takes more than 20 hours to finish the job where as `num_proc=1` gets the job done in about 5 hours. The machine I used has 40 cores, with 126G of RAM. There were no other jobs when `map` function was running.\r\n\r\nWhat could be the reason? I would be happy to provide information necessary to spot the reason.\r\n\r\np.s. I was experiencing the imbalance issue mentioned in [here](https:\/\/github.com\/huggingface\/datasets\/issues\/610#issuecomment-705177036) when I was using multi processing.\r\np.s.2 When I run `map` with `num_proc=1`, I see one tqdm bar but all the cores are working. When `num_proc=20`, only 20 cores work. 
\r\n![Screen Shot 2021-03-05 at 11 04 59](https:\/\/user-images.githubusercontent.com\/29157715\/110056895-ef6cf000-7da2-11eb-8307-6698e9fb1ad4.png)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1991\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991","id":822554473,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg1MTYwNDkx","number":1991,"title":"Adding the conllpp dataset","user":{"login":"ZihanWangKi","id":21319243,"node_id":"MDQ6VXNlcjIxMzE5MjQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21319243?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZihanWangKi","html_url":"https:\/\/github.com\/ZihanWangKi","followers_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/followers","following_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/repos","events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-04T22:19:43Z","updated_at":"2021-03-17T10:37:39Z","closed_at":"2021-03-17T10:37:39Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1991","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1991.patch"},"body":"Adding the conllpp dataset, is a revision from https:\/\/github.com\/huggingface\/datasets\/pull\/1910.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1990\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1990","id":822384502,"node_id":"MDU6SXNzdWU4MjIzODQ1MDI=","number":1990,"title":"OSError: Memory mapping file failed: Cannot allocate 
memory","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-04T18:21:58Z","updated_at":"2021-08-04T18:04:25Z","closed_at":"2021-08-04T18:04:25Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\nI am trying to run a code with a wikipedia dataset, here is the command to reproduce the error. You can find the codes for run_mlm.py in huggingface repo here: https:\/\/github.com\/huggingface\/transformers\/blob\/v4.3.2\/examples\/language-modeling\/run_mlm.py \r\n```\r\npython run_mlm.py --model_name_or_path bert-base-multilingual-cased --dataset_name wikipedia --dataset_config_name 20200501.en --do_train --do_eval --output_dir \/dara\/test --max_seq_length 128\r\n```\r\n\r\nI am using transformer version: 4.3.2 \r\n\r\nBut I got memory erorr using this dataset, is there a way I could save on memory with dataset library with wikipedia dataset?\r\nSpecially I need to train a model with multiple of wikipedia datasets concatenated. 
thank you very much @lhoestq for your help and suggestions:\r\n\r\n```\r\n File \"run_mlm.py\", line 441, in \r\n main()\r\n File \"run_mlm.py\", line 233, in main\r\n split=f\"train[{data_args.validation_split_percentage}%:]\",\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/load.py\", line 750, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 740, in as_dataset\r\n map_tuple=True,\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 757, in _build_single_dataset\r\n in_memory=in_memory,\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 829, in _as_dataset\r\n in_memory=in_memory,\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 215, in read\r\n return self.read_files(files=files, original_instructions=instructions, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 236, in read_files\r\n pa_table = self._read_files(files, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 171, in _read_files\r\n pa_table: pa.Table = self._get_dataset_from_filename(f_dict, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 302, in _get_dataset_from_filename\r\n pa_table = ArrowReader.read_table(filename, in_memory=in_memory)\r\n File \"\/dara\/libs\/anaconda3\/envs\/code\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/arrow_reader.py\", line 322, in read_table\r\n stream = stream_from(filename)\r\n File \"pyarrow\/io.pxi\", line 782, in pyarrow.lib.memory_map\r\n File \"pyarrow\/io.pxi\", line 743, in pyarrow.lib.MemoryMappedFile._open\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 99, in pyarrow.lib.check_status\r\nOSError: Memory mapping file failed: Cannot allocate memory\r\n```\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1989\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1989","id":822328147,"node_id":"MDU6SXNzdWU4MjIzMjgxNDc=","number":1989,"title":"Question\/problem with dataset 
labels","user":{"login":"ioana-blue","id":17202292,"node_id":"MDQ6VXNlcjE3MjAyMjky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17202292?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ioana-blue","html_url":"https:\/\/github.com\/ioana-blue","followers_url":"https:\/\/api.github.com\/users\/ioana-blue\/followers","following_url":"https:\/\/api.github.com\/users\/ioana-blue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ioana-blue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ioana-blue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ioana-blue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ioana-blue\/orgs","repos_url":"https:\/\/api.github.com\/users\/ioana-blue\/repos","events_url":"https:\/\/api.github.com\/users\/ioana-blue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ioana-blue\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":10,"created_at":"2021-03-04T17:06:53Z","updated_at":"2021-03-11T09:44:15Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, I'm using a dataset with two labels \"nurse\" and \"not nurse\". For whatever reason (that I don't understand), I get an error that I think comes from the datasets package (using csv). Everything works fine if the labels are \"nurse\" and \"surgeon\". \r\n\r\nThis is the trace I get:\r\n\r\n```\r\nFile \"..\/..\/..\/models\/tr-4.3.2\/run_puppets.py\", line 523, in \r\n main()\r\n File \"..\/..\/..\/models\/tr-4.3.2\/run_puppets.py\", line 249, in main\r\n datasets = load_dataset(\"csv\", data_files=data_files)\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 572, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 650, in _download_and_prepare\r\n self._prepare_split(split_generator, **prepare_split_kwargs)\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 1028, in _prepare_split\r\n writer.write_table(table)\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/datasets\/arrow_writer.py\", line 292, in write_table\r\n pa_table = pa_table.cast(self._schema)\r\n File \"pyarrow\/table.pxi\", line 1311, in pyarrow.lib.Table.cast\r\n File \"pyarrow\/table.pxi\", line 265, in pyarrow.lib.ChunkedArray.cast\r\n File \"\/dccstor\/redrug_ier\/envs\/last-tr\/lib\/python3.8\/site-packages\/pyarrow\/compute.py\", line 87, in cast\r\n return call_function(\"cast\", [arr], options)\r\n File \"pyarrow\/_compute.pyx\", line 298, in pyarrow._compute.call_function\r\n File \"pyarrow\/_compute.pyx\", line 192, in pyarrow._compute.Function.call\r\n File \"pyarrow\/error.pxi\", line 122, in pyarrow.lib.pyarrow_internal_check_status\r\n File \"pyarrow\/error.pxi\", line 84, in pyarrow.lib.check_status\r\npyarrow.lib.ArrowInvalid: Failed to parse string: not nurse\r\n```\r\n\r\nAny ideas how to fix this? For now, I'll probably make them numeric. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1988\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1988","id":822324605,"node_id":"MDU6SXNzdWU4MjIzMjQ2MDU=","number":1988,"title":"Readme.md is misleading about kinds of datasets?","user":{"login":"surak","id":878399,"node_id":"MDQ6VXNlcjg3ODM5OQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/878399?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/surak","html_url":"https:\/\/github.com\/surak","followers_url":"https:\/\/api.github.com\/users\/surak\/followers","following_url":"https:\/\/api.github.com\/users\/surak\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/surak\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/surak\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/surak\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/surak\/orgs","repos_url":"https:\/\/api.github.com\/users\/surak\/repos","events_url":"https:\/\/api.github.com\/users\/surak\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/surak\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-04T17:04:20Z","updated_at":"2021-08-04T18:05:23Z","closed_at":"2021-08-04T18:05:23Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi!\r\n\r\nAt the README.MD, you say: \"efficient data pre-processing: simple, fast and reproducible data pre-processing for the above public datasets as well as your own local datasets in CSV\/JSON\/text. \"\r\n\r\nBut here:\r\nhttps:\/\/github.com\/huggingface\/datasets\/blob\/master\/templates\/new_dataset_script.py#L82-L117\r\n\r\nYou mention other kinds of datasets, with images and so on. I'm confused. \r\n\r\nIs it possible to use it to store, say, imagenet locally? 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1987\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1987","id":822308956,"node_id":"MDU6SXNzdWU4MjIzMDg5NTY=","number":1987,"title":"wmt15 is broken","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-04T16:46:25Z","updated_at":"2021-03-04T16:46:25Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"While testing the hotfix, I tried a random other wmt release and found wmt15 to be broken:\r\n```\r\npython -c 'from datasets import load_dataset; load_dataset(\"wmt15\", \"de-en\")' \r\nDownloading: 2.91kB [00:00, 818kB\/s]\r\nDownloading: 3.02kB [00:00, 897kB\/s]\r\nDownloading: 41.1kB [00:00, 19.1MB\/s]\r\nDownloading and preparing dataset wmt15\/de-en (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/home\/stas\/.cache\/huggingface\/datasets\/wmt15\/de-en\/1.0.0\/39ad5f9262a0910a8ad7028ad432731ad23fdf91f2cebbbf2ba4776b9859e87f...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 578, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 634, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt15\/39ad5f9262a0910a8ad7028ad432731ad23fdf91f2cebbbf2ba4776b9859e87f\/wmt_utils.py\", line 757, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 283, in download_and_extract\r\n return 
self.extract(self.download(url_or_urls))\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 191, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 203, in map_nested\r\n mapped = [\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 204, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 160, in _single_map_nested\r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 160, in \r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 142, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/download_manager.py\", line 214, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/home\/stas\/anaconda3\/envs\/main-38\/lib\/python3.8\/site-packages\/datasets\/utils\/file_utils.py\", line 614, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/huggingface.co\/datasets\/wmt\/wmt15\/resolve\/main\/training-parallel-nc-v10.tgz\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1986\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1986","id":822176290,"node_id":"MDU6SXNzdWU4MjIxNzYyOTA=","number":1986,"title":"wmt datasets fail to 
load","user":{"login":"sabania","id":32322564,"node_id":"MDQ6VXNlcjMyMzIyNTY0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32322564?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sabania","html_url":"https:\/\/github.com\/sabania","followers_url":"https:\/\/api.github.com\/users\/sabania\/followers","following_url":"https:\/\/api.github.com\/users\/sabania\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sabania\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sabania\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sabania\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sabania\/orgs","repos_url":"https:\/\/api.github.com\/users\/sabania\/repos","events_url":"https:\/\/api.github.com\/users\/sabania\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sabania\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-04T14:18:55Z","updated_at":"2021-03-04T14:31:07Z","closed_at":"2021-03-04T14:31:07Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"~\\.cache\\huggingface\\modules\\datasets_modules\\datasets\\wmt14\\43e717d978d2261502b0194999583acb874ba73b0f4aed0ada2889d1bb00f36e\\wmt_utils.py in _split_generators(self, dl_manager)\r\n 758 # Extract manually downloaded files.\r\n 759 manual_files = dl_manager.extract(manual_paths_dict)\r\n--> 760 extraction_map = dict(downloaded_files, **manual_files)\r\n 761 \r\n 762 for language in self.config.language_pair:\r\n\r\nTypeError: type object argument after ** must be a mapping, not list","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1985\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985","id":822170651,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg0ODM4NjIw","number":1985,"title":"Optimize int 
precision","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-03-04T14:12:23Z","updated_at":"2021-03-22T12:04:40Z","closed_at":"2021-03-16T09:44:00Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1985","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1985.patch"},"body":"Optimize int precision to reduce dataset file size.\r\n\r\nClose #1973, close #1825, close #861.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1984\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1984","id":821816588,"node_id":"MDU6SXNzdWU4MjE4MTY1ODg=","number":1984,"title":"Add tests for WMT 
datasets","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-04T06:46:42Z","updated_at":"2021-03-04T06:46:42Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"As requested in #1981, we need tests for WMT datasets, using dummy data.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1983\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1983","id":821746008,"node_id":"MDU6SXNzdWU4MjE3NDYwMDg=","number":1983,"title":"The size of CoNLL-2003 is not consistant with the official release.","user":{"login":"h-peng17","id":39556019,"node_id":"MDQ6VXNlcjM5NTU2MDE5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/39556019?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/h-peng17","html_url":"https:\/\/github.com\/h-peng17","followers_url":"https:\/\/api.github.com\/users\/h-peng17\/followers","following_url":"https:\/\/api.github.com\/users\/h-peng17\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/h-peng17\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/h-peng17\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/h-peng17\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/h-peng17\/orgs","repos_url":"https:\/\/api.github.com\/users\/h-peng17\/repos","events_url":"https:\/\/api.github.com\/users\/h-peng17\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/h-peng17\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-03-04T04:41:34Z","updated_at":"2021-03-08T16:24:25Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Thanks for the dataset sharing! 
But when I use conll-2003, I meet some questions.\r\nThe statistics of conll-2003 in this repo is : \r\n\\#train 14041 \\#dev 3250 \\#test 3453\r\nWhile the official statistics is:\r\n\\#train 14987 \\#dev 3466 \\#test 3684\r\nWish for your reply~","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1982\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982","id":821448791,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg0MjM2NzQ0","number":1982,"title":"Fix NestedDataStructure.data for empty dict","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-03-03T20:16:51Z","updated_at":"2021-03-04T16:46:04Z","closed_at":"2021-03-03T22:48:36Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1982","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1982.patch"},"body":"Fix #1981","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1981\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1981","id":821411109,"node_id":"MDU6SXNzdWU4MjE0MTExMDk=","number":1981,"title":"wmt datasets fail to 
load","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-03-03T19:21:39Z","updated_at":"2021-03-04T14:16:47Z","closed_at":"2021-03-03T22:48:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"on master:\r\n```\r\npython -c 'from datasets import load_dataset; load_dataset(\"wmt14\", \"de-en\")'\r\nDownloading and preparing dataset wmt14\/de-en (download: Unknown size, generated: Unknown 
size, post-processed: Unknown size, total: Unknown size) to \/home\/stas\/.cache\/huggingface\/datasets\/wmt14\/de-en\/1.0.0\/43e717d978d2261502b0194999583acb874ba73b0f4aed0ada2889d1bb00f36e...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 578, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 634, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt14\/43e717d978d2261502b0194999583acb874ba73b0f4aed0ada2889d1bb00f36e\/wmt_utils.py\", line 760, in _split_generators\r\n extraction_map = dict(downloaded_files, **manual_files)\r\n```\r\n\r\nit worked fine recently. same problem if I try wmt16.\r\n\r\ngit bisect points to this commit from Feb 25 as the culprit https:\/\/github.com\/huggingface\/datasets\/commit\/792f1d9bb1c5361908f73e2ef7f0181b2be409fa\r\n\r\n@albertvillanova ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1980\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980","id":821312810,"node_id":"MDExOlB1bGxSZXF1ZXN0NTg0MTI1OTUy","number":1980,"title":"Loading all answers from drop","user":{"login":"KaijuML","id":25499439,"node_id":"MDQ6VXNlcjI1NDk5NDM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/25499439?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/KaijuML","html_url":"https:\/\/github.com\/KaijuML","followers_url":"https:\/\/api.github.com\/users\/KaijuML\/followers","following_url":"https:\/\/api.github.com\/users\/KaijuML\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/KaijuML\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/KaijuML\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/KaijuML\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/KaijuML\/orgs","repos_url":"https:\/\/api.github.com\/users\/KaijuML\/repos","events_url":"https:\/\/api.github.com\/users\/KaijuML\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/KaijuML\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-03T17:13:07Z","updated_at":"2021-03-15T11:27:26Z","closed_at":"2021-03-15T11:27:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1980","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1980.patch"},"body":"Hello all,\r\n\r\nI propose this change 
to the DROP loading script so that all answers are loaded no matter their type. Currently, only \"span\" answers are loaded, which excludes a significant amount of answers from drop (i.e. \"number\" and \"date\").\r\n\r\nI updated the script with the version I use for my work. However, I couldn't find a way to verify that all is working when integrated with the datasets repo, since the `load_dataset` method seems to always download the script from github and not local files.\r\n\r\nNote that 9 items from the train set have no answers, as well as 1 from the validation set. The script I propose simply do not load them.\r\n\r\nLet me know if there is anything else I can do,\r\nCl\u00e9ment","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1979\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979","id":820977853,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzODQ3MTk3","number":1979,"title":"Add article_id and process test set template for semeval 2020 task 11\u2026","user":{"login":"hemildesai","id":8195444,"node_id":"MDQ6VXNlcjgxOTU0NDQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8195444?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/hemildesai","html_url":"https:\/\/github.com\/hemildesai","followers_url":"https:\/\/api.github.com\/users\/hemildesai\/followers","following_url":"https:\/\/api.github.com\/users\/hemildesai\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/hemildesai\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/hemildesai\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/hemildesai\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/hemildesai\/orgs","repos_url":"https:\/\/api.github.com\/users\/hemildesai\/repos","events_url":"https:\/\/api.github.com\/users\/hemildesai\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/hemildesai\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-03T10:34:32Z","updated_at":"2021-03-13T10:59:40Z","closed_at":"2021-03-12T13:10:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1979","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1979.patch"},"body":"\u2026 dataset\r\n\r\n- `article_id` is needed to create the submission file for the task at https:\/\/propaganda.qcri.org\/semeval2020-task11\/\r\n- The `technique classification` task provides the span indices in a template for the test set that is necessary to complete the task. 
This PR implements processing of that template for the dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1978\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978","id":820956806,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzODI5Njgz","number":1978,"title":"Adding ro sts dataset","user":{"login":"lorinczb","id":36982089,"node_id":"MDQ6VXNlcjM2OTgyMDg5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/36982089?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lorinczb","html_url":"https:\/\/github.com\/lorinczb","followers_url":"https:\/\/api.github.com\/users\/lorinczb\/followers","following_url":"https:\/\/api.github.com\/users\/lorinczb\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lorinczb\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lorinczb\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lorinczb\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lorinczb\/orgs","repos_url":"https:\/\/api.github.com\/users\/lorinczb\/repos","events_url":"https:\/\/api.github.com\/users\/lorinczb\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lorinczb\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-03T10:08:53Z","updated_at":"2021-03-05T10:00:14Z","closed_at":"2021-03-05T09:33:55Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1978","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1978.patch"},"body":"Adding [RO-STS](https:\/\/github.com\/dumitrescustefan\/RO-STS) dataset","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1977\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1977","id":820312022,"node_id":"MDU6SXNzdWU4MjAzMTIwMjI=","number":1977,"title":"ModuleNotFoundError: No module named 'apache_beam' for wikipedia datasets 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-03-02T19:21:28Z","updated_at":"2021-03-03T10:17:40Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI am trying to run run_mlm.py code [1] of huggingface with following \"wikipedia\"\/ \"20200501.aa\" dataset:\r\n\r\n`python run_mlm.py --model_name_or_path bert-base-multilingual-cased --dataset_name wikipedia --dataset_config_name 20200501.aa --do_train --do_eval --output_dir \/tmp\/test-mlm --max_seq_length 256\r\n`\r\n\r\nI am getting this error, but as per documentation, huggingface dataset provide processed version of this dataset and users can load it without requiring setup extra settings for apache-beam. could you help me please to load this dataset? \r\nDo you think I can run run_ml.py with this dataset? or anyway I could subsample and train the model? I greatly appreciate providing the processed version of all languages for this dataset, which allow the user to use them without setting up apache-beam,. 
thanks \r\n\r\nI really appreciate your help.\r\n@lhoestq \r\n\r\nthanks.\r\n\r\n[1] https:\/\/github.com\/huggingface\/transformers\/blob\/master\/examples\/language-modeling\/run_mlm.py\r\n\r\nerror I get: \r\n\r\n```\r\n>>> import datasets \r\n>>> datasets.load_dataset(\"wikipedia\", \"20200501.aa\")\r\nDownloading and preparing dataset wikipedia\/20200501.aa (download: Unknown size, generated: Unknown size, post-processed: Unknown size, total: Unknown size) to \/dara\/temp\/cache_home_2\/datasets\/wikipedia\/20200501.aa\/1.0.0\/4021357e28509391eab2f8300d9b689e7e8f3a877ebb3d354b01577d497ebc63...\r\nTraceback (most recent call last):\r\n File \"\", line 1, in \r\n File \"\/dara\/temp\/libs\/anaconda3\/envs\/codes\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/load.py\", line 746, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/dara\/temp\/libs\/anaconda3\/envs\/codes\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 573, in download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/dara\/temp\/libs\/anaconda3\/envs\/codes\/lib\/python3.7\/site-packages\/datasets-1.3.0-py3.7.egg\/datasets\/builder.py\", line 1099, in _download_and_prepare\r\n import apache_beam as beam\r\nModuleNotFoundError: No module named 'apache_beam'\r\n\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1976\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976","id":820228538,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzMjA3NDI4","number":1976,"title":"Add datasets full offline mode with 
HF_DATASETS_OFFLINE","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T17:26:59Z","updated_at":"2021-03-03T15:45:31Z","closed_at":"2021-03-03T15:45:30Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1976","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1976.patch"},"body":"Add the HF_DATASETS_OFFLINE environment variable for users who want to use `datasets` offline without having to wait for the network timeouts\/retries to happen. This was requested in https:\/\/github.com\/huggingface\/datasets\/issues\/1939\r\n\r\ncc @stas00 ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1975\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975","id":820205485,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzMTg4NjM3","number":1975,"title":"Fix 
flake8","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T16:59:13Z","updated_at":"2021-03-04T10:43:22Z","closed_at":"2021-03-04T10:43:22Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1975","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1975.patch"},"body":"Fix flake8 style.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1974\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974","id":820122223,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgzMTE5MDI0","number":1974,"title":"feat(docs): navigate with left\/right arrow 
keys","user":{"login":"ydcjeff","id":32727188,"node_id":"MDQ6VXNlcjMyNzI3MTg4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/32727188?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ydcjeff","html_url":"https:\/\/github.com\/ydcjeff","followers_url":"https:\/\/api.github.com\/users\/ydcjeff\/followers","following_url":"https:\/\/api.github.com\/users\/ydcjeff\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ydcjeff\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ydcjeff\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ydcjeff\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ydcjeff\/orgs","repos_url":"https:\/\/api.github.com\/users\/ydcjeff\/repos","events_url":"https:\/\/api.github.com\/users\/ydcjeff\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ydcjeff\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T15:24:50Z","updated_at":"2021-03-04T10:44:12Z","closed_at":"2021-03-04T10:42:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1974","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1974.patch"},"body":"Enables docs navigation with left\/right arrow keys. It can be useful for the ones who navigate with keyboard a lot.\r\nMore info : https:\/\/github.com\/sphinx-doc\/sphinx\/pull\/2064\r\n\r\nYou can try here : https:\/\/29353-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/index.html","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1973\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1973","id":820077312,"node_id":"MDU6SXNzdWU4MjAwNzczMTI=","number":1973,"title":"Question: what gets stored in the datasets cache and why is it so 
huge?","user":{"login":"ioana-blue","id":17202292,"node_id":"MDQ6VXNlcjE3MjAyMjky","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/17202292?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ioana-blue","html_url":"https:\/\/github.com\/ioana-blue","followers_url":"https:\/\/api.github.com\/users\/ioana-blue\/followers","following_url":"https:\/\/api.github.com\/users\/ioana-blue\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ioana-blue\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ioana-blue\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ioana-blue\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ioana-blue\/orgs","repos_url":"https:\/\/api.github.com\/users\/ioana-blue\/repos","events_url":"https:\/\/api.github.com\/users\/ioana-blue\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ioana-blue\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":8,"created_at":"2021-03-02T14:35:53Z","updated_at":"2021-03-30T14:03:59Z","closed_at":"2021-03-16T09:44:00Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I'm running several training jobs (around 10) with a relatively large dataset (3M samples). The datasets cache reached 178G and it seems really large. 
What is it stored in there and why is it so large? I don't think I noticed this problem before and seems to be related to the new version of the datasets library. Any insight? Thank you!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1972\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1972","id":819752761,"node_id":"MDU6SXNzdWU4MTk3NTI3NjE=","number":1972,"title":"'Dataset' object has no attribute 'rename_column'","user":{"login":"farooqzaman1","id":23195502,"node_id":"MDQ6VXNlcjIzMTk1NTAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23195502?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/farooqzaman1","html_url":"https:\/\/github.com\/farooqzaman1","followers_url":"https:\/\/api.github.com\/users\/farooqzaman1\/followers","following_url":"https:\/\/api.github.com\/users\/farooqzaman1\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/farooqzaman1\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/farooqzaman1\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/farooqzaman1\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/farooqzaman1\/orgs","repos_url":"https:\/\/api.github.com\/users\/farooqzaman1\/repos","events_url":"https:\/\/api.github.com\/users\/farooqzaman1\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/farooqzaman1\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-02T08:01:49Z","updated_at":"2021-03-02T13:08:03Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"'Dataset' object has no attribute 'rename_column'","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1971\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971","id":819714231,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyNzgyNTU0","number":1971,"title":"Fix ArrowWriter closes stream at 
exit","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-03-02T07:12:34Z","updated_at":"2021-03-10T16:36:57Z","closed_at":"2021-03-10T16:36:57Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1971","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1971.patch"},"body":"Current implementation of ArrowWriter does not properly release the `stream` resource (by closing it) if its `finalize()` method is not called and\/or an Exception is raised before\/during the call to its `finalize()` method.\r\n\r\nTherefore, ArrowWriter should be used as a context manager that properly closes its `stream` resource at exit.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1970\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970","id":819500620,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyNjAzMzEw","number":1970,"title":"Fixing the URL filtering for bad MLSUM examples in 
GEM","user":{"login":"yjernite","id":10469459,"node_id":"MDQ6VXNlcjEwNDY5NDU5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10469459?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yjernite","html_url":"https:\/\/github.com\/yjernite","followers_url":"https:\/\/api.github.com\/users\/yjernite\/followers","following_url":"https:\/\/api.github.com\/users\/yjernite\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yjernite\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yjernite\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yjernite\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yjernite\/orgs","repos_url":"https:\/\/api.github.com\/users\/yjernite\/repos","events_url":"https:\/\/api.github.com\/users\/yjernite\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yjernite\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-03-02T01:22:58Z","updated_at":"2021-03-02T03:19:06Z","closed_at":"2021-03-02T02:01:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1970","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1970.patch"},"body":"This updates the code and metadata to use the updated `gem_mlsum_bad_ids_fixed.json` file provided by @juand-r\r\n\r\ncc @sebastianGehrmann ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1967\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967","id":819129568,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyMjc5OTEx","number":1967,"title":"Add Turkish News Category Dataset - 270K - Lite 
Version","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-01T18:21:59Z","updated_at":"2021-03-02T17:25:00Z","closed_at":"2021-03-02T17:25:00Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1967","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1967.patch"},"body":"This PR adds the Turkish News Categories Dataset (270K - Lite Version) dataset which is a text classification dataset by me, @basakbuluz and @serdarakyol.\r\nThis dataset contains the same news from the current [interpress_news_category_tr dataset](https:\/\/huggingface.co\/datasets\/interpress_news_category_tr) but contains less information, OCR errors are reduced, can be easily separated, and can be divided into 10 classes (\"k\u00fclt\u00fcrsanat\", \"ekonomi\", \"siyaset\", \"e\u011fitim\", \"d\u00fcnya\", \"spor\", \"teknoloji\", \"magazin\", \"sa\u011fl\u0131k\", \"g\u00fcndem\") were rearranged.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1966\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966","id":819101253,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgyMjU2MzE0","number":1966,"title":"Fix metrics collision in separate multiprocessed 
experiments","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-03-01T17:45:18Z","updated_at":"2021-03-02T13:05:45Z","closed_at":"2021-03-02T13:05:44Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1966","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1966.patch"},"body":"As noticed in #1942 , there's a issue with locks if you run multiple separate evaluation experiments in a multiprocessed setup.\r\n\r\nIndeed there is a time span in Metric._finalize() where the process 0 loses its lock before re-acquiring it. This is bad since the lock of the process 0 tells the other process that the corresponding cache file is available for writing\/reading\/deleting: we end up having one metric cache that collides with another one. This can raise FileNotFound errors when a metric tries to read the cache file and if the second conflicting metric deleted it.\r\n\r\nTo fix that I made sure that the lock file of the process 0 stays acquired from the cache file creation to the end of the metric computation. 
This way the other metrics can simply sample a new hashing name in order to avoid the collision.\r\n\r\nFinally I added missing tests for separate experiments in distributed setup.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1965\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1965","id":818833460,"node_id":"MDU6SXNzdWU4MTg4MzM0NjA=","number":1965,"title":"Can we parallelized the add_faiss_index process over dataset shards ?","user":{"login":"shamanez","id":16892570,"node_id":"MDQ6VXNlcjE2ODkyNTcw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16892570?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/shamanez","html_url":"https:\/\/github.com\/shamanez","followers_url":"https:\/\/api.github.com\/users\/shamanez\/followers","following_url":"https:\/\/api.github.com\/users\/shamanez\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/shamanez\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/shamanez\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/shamanez\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/shamanez\/orgs","repos_url":"https:\/\/api.github.com\/users\/shamanez\/repos","events_url":"https:\/\/api.github.com\/users\/shamanez\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/shamanez\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-03-01T12:47:34Z","updated_at":"2021-03-04T19:40:56Z","closed_at":"2021-03-04T19:40:42Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am thinking of making the **add_faiss_index** process faster. 
What if we run the add_faiss_index process on separate dataset shards and then combine them before (dataset.concatenate) saving the faiss.index file ?\r\n\r\nI feel theoretically this will reduce the accuracy of retrieval since it affects the indexing process.\r\n\r\n@lhoestq\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1964\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1964","id":818624864,"node_id":"MDU6SXNzdWU4MTg2MjQ4NjQ=","number":1964,"title":"Datasets.py function load_dataset does not match squad dataset","user":{"login":"LeopoldACC","id":44536699,"node_id":"MDQ6VXNlcjQ0NTM2Njk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/44536699?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LeopoldACC","html_url":"https:\/\/github.com\/LeopoldACC","followers_url":"https:\/\/api.github.com\/users\/LeopoldACC\/followers","following_url":"https:\/\/api.github.com\/users\/LeopoldACC\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LeopoldACC\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LeopoldACC\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LeopoldACC\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LeopoldACC\/orgs","repos_url":"https:\/\/api.github.com\/users\/LeopoldACC\/repos","events_url":"https:\/\/api.github.com\/users\/LeopoldACC\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LeopoldACC\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-03-01T08:41:31Z","updated_at":"2021-03-04T15:09:26Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"### 1 When I try to train lxmert,and follow the code in README that --dataset name:\r\n```shell \r\npython examples\/question-answering\/run_qa.py --model_name_or_path unc-nlp\/lxmert-base-uncased --dataset_name squad --do_train --do_eval --per_device_train_batch_size 12 --learning_rate 3e-5 --num_train_epochs 2 --max_seq_length 384 --doc_stride 128 --output_dir \/home2\/zhenggo1\/checkpoint\/lxmert_squad\r\n```\r\nthe bug is that:\r\n```\r\nDownloading and preparing dataset squad\/plain_text (download: 33.51 MiB, generated: 85.75 MiB, post-processed: Unknown size, total: 119.27 MiB) to \/home2\/zhenggo1\/.cache\/huggingface\/datasets\/squad\/plain_text\/1.0.0\/4c81550d83a2ac7c7ce23783bd8ff36642800e6633c1f18417fb58c3ff50cdd7...\r\nTraceback (most recent call last):\r\n File \"examples\/question-answering\/run_qa.py\", line 501, in \r\n main()\r\n File \"examples\/question-answering\/run_qa.py\", line 217, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name)\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/load.py\", line 746, in load_dataset\r\n use_auth_token=use_auth_token,\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 573, in 
download_and_prepare\r\n dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/builder.py\", line 633, in _download_and_prepare\r\n self.info.download_checksums, dl_manager.get_recorded_sizes_checksums(), \"dataset source files\"\r\n File \"\/home2\/zhenggo1\/anaconda3\/envs\/lpot\/lib\/python3.7\/site-packages\/datasets\/utils\/info_utils.py\", line 39, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/rajpurkar.github.io\/SQuAD-explorer\/dataset\/train-v1.1.json']\r\n```\r\nAnd I try to find the [checksum link](https:\/\/github.com\/huggingface\/datasets\/blob\/master\/datasets\/squad\/dataset_infos.json)\r\n,is the problem plain_text do not have a checksum?\r\n\r\n### 2 When I try to train lxmert,and use local dataset:\r\n```\r\npython examples\/question-answering\/run_qa.py --model_name_or_path unc-nlp\/lxmert-base-uncased --train_file $SQUAD_DIR\/train-v1.1.json --validation_file $SQUAD_DIR\/dev-v1.1.json --do_train --do_eval --per_device_train_batch_size 12 --learning_rate 3e-5 --num_train_epochs 2 --max_seq_length 384 --doc_stride 128 --output_dir \/home2\/zhenggo1\/checkpoint\/lxmert_squad\r\n```\r\nThe bug is that \r\n```\r\n['title', 'paragraphs']\r\nTraceback (most recent call last):\r\n File \"examples\/question-answering\/run_qa.py\", line 501, in \r\n main()\r\n File \"examples\/question-answering\/run_qa.py\", line 273, in main\r\n answer_column_name = \"answers\" if \"answers\" in column_names else column_names[2]\r\nIndexError: list index out of range\r\n```\r\nI print the answer_column_name and find that local squad dataset need the package datasets to preprocessing so that the code below can work:\r\n```\r\nif training_args.do_train:\r\n column_names = datasets[\"train\"].column_names\r\n else:\r\n column_names = datasets[\"validation\"].column_names\r\n print(datasets[\"train\"].column_names)\r\n question_column_name = \"question\" if \"question\" in column_names else column_names[0]\r\n context_column_name = \"context\" if \"context\" in column_names else column_names[1]\r\n answer_column_name = \"answers\" if \"answers\" in column_names else column_names[2]\r\n``` \r\n## Please tell me how to fix the bug,thks a lot!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1963\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1963","id":818289967,"node_id":"MDU6SXNzdWU4MTgyODk5Njc=","number":1963,"title":"bug in SNLI dataset 
","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-28T19:36:20Z","updated_at":"2021-03-01T12:01:29Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nThere is label of -1 in train set of SNLI dataset, please find the code below:\r\n\r\n```\r\nimport numpy as np \r\nimport datasets \r\ndata = datasets.load_dataset(\"snli\")[\"train\"]\r\nlabels = []\r\nfor d in data:\r\n labels.append(d[\"label\"])\r\nprint(np.unique(labels))\r\n```\r\n\r\nand results:\r\n\r\n`[-1 0 1 2]`\r\n\r\nversion of datasets used:\r\n`datasets 1.2.1 \r\n`\r\n\r\nthanks for your help. @lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1962\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962","id":818089156,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxNDQwNzM4","number":1962,"title":"Fix unused 
arguments","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-28T02:47:07Z","updated_at":"2021-03-11T02:18:17Z","closed_at":"2021-03-03T16:37:50Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1962","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1962.patch"},"body":"Noticed some args in the codebase are not used, so managed to find all such occurrences with Pylance and fix them.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1961\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961","id":818077947,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxNDM3NDI0","number":1961,"title":"Add sst 
dataset","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-28T02:08:29Z","updated_at":"2021-03-04T10:38:53Z","closed_at":"2021-03-04T10:38:53Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1961","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1961.patch"},"body":"Related to #1934—Add the Stanford Sentiment Treebank dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1960\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960","id":818073154,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxNDMzOTY4","number":1960,"title":"Allow stateful function in 
dataset.map","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-28T01:29:05Z","updated_at":"2021-03-23T15:26:49Z","closed_at":"2021-03-23T15:26:49Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1960","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1960.patch"},"body":"Removes the \"test type\" section in Dataset.map which would modify the state of the stateful function. Now, the return type of the map function is inferred after processing the first example.\r\n\r\nFixes #1940 \r\n\r\n@lhoestq Not very happy with the usage of `nonlocal`. 
Would like to hear your opinion on this.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1959\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1959","id":818055644,"node_id":"MDU6SXNzdWU4MTgwNTU2NDQ=","number":1959,"title":"Bug in skip_rows argument of load_dataset function ?","user":{"login":"LedaguenelArthur","id":73159756,"node_id":"MDQ6VXNlcjczMTU5NzU2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/73159756?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/LedaguenelArthur","html_url":"https:\/\/github.com\/LedaguenelArthur","followers_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/followers","following_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/orgs","repos_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/repos","events_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/LedaguenelArthur\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-27T23:32:54Z","updated_at":"2021-03-09T10:21:32Z","closed_at":"2021-03-09T10:21:32Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hello everyone,\r\n\r\nI'm quite new to Git so sorry in advance if I'm breaking some ground rules of issues posting... :\/\r\nI tried to use the load_dataset function, from Huggingface datasets library, on a csv file using the skip_rows argument described on Huggingface page to skip the first row containing column names\r\n\r\n`test_dataset = load_dataset('csv', data_files=['test_wLabel.tsv'], delimiter='\\t', column_names=[\"id\", \"sentence\", \"label\"], skip_rows=1)`\r\n\r\nBut I got the following error message\r\n\r\n`__init__() got an unexpected keyword argument 'skip_rows'`\r\n\r\nHave I used the wrong argument ? 
Am I missing something or is this a bug ?\r\n\r\nThank you very much for your time,\r\nBest regards,\r\nArthur","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1958\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1958","id":818037548,"node_id":"MDU6SXNzdWU4MTgwMzc1NDg=","number":1958,"title":"XSum dataset download link broken","user":{"login":"himat","id":1156974,"node_id":"MDQ6VXNlcjExNTY5NzQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1156974?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/himat","html_url":"https:\/\/github.com\/himat","followers_url":"https:\/\/api.github.com\/users\/himat\/followers","following_url":"https:\/\/api.github.com\/users\/himat\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/himat\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/himat\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/himat\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/himat\/orgs","repos_url":"https:\/\/api.github.com\/users\/himat\/repos","events_url":"https:\/\/api.github.com\/users\/himat\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/himat\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-27T21:47:56Z","updated_at":"2021-02-27T21:50:16Z","closed_at":"2021-02-27T21:50:16Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I did \r\n```\r\nfrom datasets import load_dataset\r\n\r\ndataset = load_dataset(\"xsum\")\r\n```\r\n\r\nThis returns\r\n`ConnectionError: Couldn't reach http:\/\/bollin.inf.ed.ac.uk\/public\/direct\/XSUM-EMNLP18-Summary-Data-Original.tar.gz`","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1957\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1957","id":818014624,"node_id":"MDU6SXNzdWU4MTgwMTQ2MjQ=","number":1957,"title":"[request] make load_metric api 
intutive","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-27T20:43:54Z","updated_at":"2021-02-27T22:21:10Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"```\r\nmetric = load_metric('glue', 'mrpc', num_process=num_process, process_id=rank)\r\n```\r\n\r\nMay I suggest that `num_process` is confusing as it's singular yet expects a plural value and either \r\n* be deprecated in favor of `num_processes` which is more intuitive since it's plural as its expected value\r\n* or even better why not mimic the established dist environment convention for that purpose, which uses `world_size`. \r\n\r\nSame for `process_id` - why reinvent the naming and needing to explain that this is **NOT** `PID`, when we have `rank` already. That is:\r\n\r\n```\r\nmetric = load_metric('glue', 'mrpc', world_size=world_size, rank=rank)\r\n```\r\n\r\nThis then fits like a glove into the pytorch DDP and alike envs. and we just need to call:\r\n\r\n* `dist.get_world_size()`\r\n* `dist.get_rank()`\r\n\r\nSo it'd be as simple as:\r\n\r\n```\r\nmetric = load_metric('glue', 'mrpc', world_size=dist.get_world_size(), rank=dist.get_rank())\r\n```\r\n\r\nFrom: https:\/\/pytorch.org\/docs\/stable\/distributed.html#torch.distributed.init_process_group\r\n\r\n* `world_size (int, optional)` \u2013 Number of processes participating in the job. Required if store is specified.\r\n* `rank (int, optional)` \u2013 Rank of the current process. 
Required if store is specified.\r\n\r\nAnd may be an example would be useful, so that the user doesn't even need to think about where to get `dist`:\r\n```\r\nimport torch.distributed as dist\r\nif dist.is_initialized():\r\n metric = load_metric(metric_name, world_size=dist.get_world_size(), rank=dist.get_rank())\r\nelse:\r\n metric = load_metric(metric_name)\r\n```\r\n\r\nI'm aware this is pytorch-centric, but it's better than no examples, IMHO.\r\n\r\nThank you.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1956\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1956","id":818013741,"node_id":"MDU6SXNzdWU4MTgwMTM3NDE=","number":1956,"title":"[distributed env] potentially unsafe parallel execution","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-27T20:38:45Z","updated_at":"2021-03-01T17:24:42Z","closed_at":"2021-03-01T17:24:42Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"```\r\nmetric = load_metric('glue', 'mrpc', num_process=num_process, process_id=rank)\r\n```\r\n\r\npresumes that there is only one set of parallel processes running - and will intermittently fail if you have multiple sets running as they will surely overwrite each other. Similar to https:\/\/github.com\/huggingface\/datasets\/issues\/1942 (but for a different reason).\r\nThat's why dist environments use some unique to a group identifier so that each group is dealt with separately. \r\n\r\ne.g. the env-way of pytorch dist syncing is done with a unique per set `MASTER_ADDRESS+MASTER_PORT`\r\n\r\nSo ideally this interface should ask for a shared secret to do the right thing.\r\n\r\nI'm not reporting an immediate need, but am only flagging that this will hit someone down the road.\r\n\r\nThis problem can be remedied by adding a new optional `shared_secret` option, which can then be used to differentiate different groups of processes. 
and this secret should be part of the file lock name and the experiment.\r\n\r\nThank you","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1955\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955","id":818010664,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgxMzk2OTA5","number":1955,"title":"typos + grammar","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-27T20:21:43Z","updated_at":"2021-03-01T17:20:38Z","closed_at":"2021-03-01T14:43:19Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1955","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1955.patch"},"body":"This PR proposes a few typo + grammar fixes, and rewrites some sentences in an attempt to improve readability.\r\n\r\nN.B. 
When referring to the library `datasets` in the docs it is typically used as a singular, and it definitely is a singular when written as \"`datasets` library\", that is \"`datasets` library is ...\" and not \"are ...\".","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1954\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1954","id":817565563,"node_id":"MDU6SXNzdWU4MTc1NjU1NjM=","number":1954,"title":"add a new column ","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"assignees":[{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvi
llanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-02-26T18:17:27Z","updated_at":"2021-04-29T14:50:43Z","closed_at":"2021-04-29T14:50:43Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI'd need to add a new column to the dataset, I was wondering how this can be done? thanks \r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1953\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953","id":817498869,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwOTgyMDMz","number":1953,"title":"Documentation for to_csv, to_pandas and to_dict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-26T16:35:49Z","updated_at":"2021-03-01T14:03:48Z","closed_at":"2021-03-01T14:03:47Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1953","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1953.patch"},"body":"I added these methods to the documentation with a small paragraph.\r\n\r\nI also fixed some formatting issues in the docstrings","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1952\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952","id":817428160,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwOTIyNjQw","number":1952,"title":"Handle timeouts","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-26T15:02:07Z","updated_at":"2021-03-01T14:29:24Z","closed_at":"2021-03-01T14:29:24Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1952","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1952.patch"},"body":"As noticed in https:\/\/github.com\/huggingface\/datasets\/issues\/1939, timeouts were not properly handled when loading a dataset.\r\nThis caused the connection to hang indefinitely when working in a firewalled environment cc @stas00 \r\n\r\nI added a default timeout, and included an option to our offline environment for tests to be able to simulate both connection errors and timeout errors (previously it was simulating connection errors only).\r\n\r\nNow networks calls don't hang indefinitely.\r\nThe default timeout is set to 10sec (we might reduce it).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1951\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951","id":817423573,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwOTE4ODE2","number":1951,"title":"Add cross-platform support for 
datasets-cli","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-26T14:56:25Z","updated_at":"2021-03-11T02:18:26Z","closed_at":"2021-02-26T15:30:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1951","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1951.patch"},"body":"One thing I've noticed while going through the codebase is the usage of `scripts` in `setup.py`. This [answer](https:\/\/stackoverflow.com\/a\/28119736\/14095927) on SO explains it nicely why it's better to use `entry_points` instead of `scripts`. To add cross-platform support to the CLI, this PR replaces `scripts` with `entry_points` in `setup.py` and moves datasets-cli to src\/datasets\/commands\/datasets_cli.py. All *.md and *.rst files are updated accordingly. 
The same changes were made in the transformers repo to add cross-platform ([link to PR](https:\/\/github.com\/huggingface\/transformers\/pull\/4131)).","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1950\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950","id":817295235,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwODExMjMz","number":1950,"title":"updated multi_nli dataset with missing fields","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-26T11:54:36Z","updated_at":"2021-03-01T11:08:30Z","closed_at":"2021-03-01T11:08:29Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1950","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1950.patch"},"body":"1) updated fields which were missing earlier\r\n2) added tags to README\r\n3) updated a few fields of README \r\n4) new dataset_infos.json and dummy files","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1949\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1949","id":816986936,"node_id":"MDU6SXNzdWU4MTY5ODY5MzY=","number":1949,"title":"Enable Fast Filtering using Arrow 
Dataset","user":{"login":"gchhablani","id":29076344,"node_id":"MDQ6VXNlcjI5MDc2MzQ0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/29076344?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/gchhablani","html_url":"https:\/\/github.com\/gchhablani","followers_url":"https:\/\/api.github.com\/users\/gchhablani\/followers","following_url":"https:\/\/api.github.com\/users\/gchhablani\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/gchhablani\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/gchhablani\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/gchhablani\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/gchhablani\/orgs","repos_url":"https:\/\/api.github.com\/users\/gchhablani\/repos","events_url":"https:\/\/api.github.com\/users\/gchhablani\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/gchhablani\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-26T02:53:37Z","updated_at":"2021-02-26T19:18:29Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi @lhoestq,\r\n\r\nAs mentioned in Issue #1796, I would love to work on enabling fast filtering\/mapping. Can you please share the expectations? It would be great if you could point me to the relevant methods\/files involved. Or the docs or maybe an overview of `arrow_dataset.py`. I only ask this because I am having trouble getting started ;-;\r\n\r\nAny help would be appreciated.\r\n\r\nThanks,\r\nGunjan","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1948\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1948","id":816689329,"node_id":"MDU6SXNzdWU4MTY2ODkzMjk=","number":1948,"title":"dataset loading logger 
level","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-25T18:33:37Z","updated_at":"2021-02-26T01:27:04Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"on master I get this with `--dataset_name wmt16 --dataset_config ro-en`:\r\n\r\n```\r\nWARNING:datasets.arrow_dataset:Loading cached processed dataset at \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/9dc00622c30446e99c4c63d12a484ea4fb653f2f37c867d6edcec839d7eae50f\/cache-2e01bead8cf42e26.arrow\r\nWARNING:datasets.arrow_dataset:Loading cached processed dataset at \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/9dc00622c30446e99c4c63d12a484ea4fb653f2f37c867d6edcec839d7eae50f\/cache-ac3bebaf4f91f776.arrow\r\nWARNING:datasets.arrow_dataset:Loading cached processed dataset at \/home\/stas\/.cache\/huggingface\/datasets\/wmt16\/ro-en\/1.0.0\/9dc00622c30446e99c4c63d12a484ea4fb653f2f37c867d6edcec839d7eae50f\/cache-810c3e61259d73a9.arrow\r\n```\r\n\r\nwhy are those WARNINGs? 
Should be INFO, no?\r\n\r\nwarnings should only be used when a user needs to pay attention to something, this is just informative - I'd even say it should be DEBUG, but definitely not WARNING.\r\n\r\nThank you.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1947\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947","id":816590299,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwMjI2MDk5","number":1947,"title":"Update documentation with not in place transforms and update DatasetDict","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-25T16:23:18Z","updated_at":"2021-03-01T14:36:54Z","closed_at":"2021-03-01T14:36:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1947","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1947.patch"},"body":"In #1883 were added the not in-place transforms `flatten`, `remove_columns`, `rename_column` and `cast`.\r\n\r\nI added them to the documentation and added a paragraph on how to use them\r\n\r\nYou can preview the documentation [here](https:\/\/28862-250213286-gh.circle-artifacts.com\/0\/docs\/_build\/html\/processing.html#renaming-removing-casting-and-flattening-columns)\r\n\r\nI also added these methods to the DatasetDict class.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1946\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946","id":816526294,"node_id":"MDExOlB1bGxSZXF1ZXN0NTgwMTcyNzI2","number":1946,"title":"Implement Dataset from 
CSV","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-25T15:10:13Z","updated_at":"2021-03-12T09:42:48Z","closed_at":"2021-03-12T09:42:48Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1946","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1946.patch"},"body":"Implement `Dataset.from_csv`.\r\n\r\nAnalogue to #1943.\r\n\r\nIf finally, the scripts should be used instead, at least we can reuse the tests here. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1945\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1945","id":816421966,"node_id":"MDU6SXNzdWU4MTY0MjE5NjY=","number":1945,"title":"AttributeError: 'DatasetDict' object has no attribute 'concatenate_datasets'","user":{"login":"dorost1234","id":79165106,"node_id":"MDQ6VXNlcjc5MTY1MTA2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/79165106?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/dorost1234","html_url":"https:\/\/github.com\/dorost1234","followers_url":"https:\/\/api.github.com\/users\/dorost1234\/followers","following_url":"https:\/\/api.github.com\/users\/dorost1234\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/dorost1234\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/dorost1234\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/dorost1234\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/dorost1234\/orgs","repos_url":"https:\/\/api.github.com\/users\/dorost1234\/repos","events_url":"https:\/\/api.github.com\/users\/dorost1234\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/dorost1234\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-25T13:09:45Z","updated_at":"2021-02-25T13:20:35Z","closed_at":"2021-02-25T13:20:26Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi\r\nI am trying to concatenate a list of huggingface datastes as:\r\n\r\n` train_dataset = datasets.concatenate_datasets(train_datasets)\r\n`\r\nHere is the `train_datasets` when I print:\r\n\r\n```\r\n[Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 120361\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 2670\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 6944\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 38140\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 173711\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 1655\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 4274\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 2019\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 2109\r\n}), Dataset({\r\n features: ['attention_mask', 'idx', 'input_ids', 
'label', 'question1', 'question2', 'token_type_ids'],\r\n num_rows: 11963\r\n})]\r\n```\r\n\r\nI am getting the following error:\r\n\r\n`AttributeError: 'DatasetDict' object has no attribute 'concatenate_datasets'\r\n`\r\n\r\nI was wondering if you could help me with this issue, thanks a lot ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1944\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944","id":816267216,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc5OTU2Nzc3","number":1944,"title":"Add Turkish News Category Dataset (270K - Lite Version)","user":{"login":"yavuzKomecoglu","id":5150963,"node_id":"MDQ6VXNlcjUxNTA5NjM=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5150963?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yavuzKomecoglu","html_url":"https:\/\/github.com\/yavuzKomecoglu","followers_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/followers","following_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/orgs","repos_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/repos","events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yavuzKomecoglu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-25T09:45:22Z","updated_at":"2021-03-02T17:46:41Z","closed_at":"2021-03-01T18:23:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1944","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1944.patch"},"body":"This PR adds the Turkish News Categories Dataset (270K - Lite Version) dataset which is a text classification dataset by me, @basakbuluz and @serdarakyol. 
\r\nThis dataset contains the same news from the current [interpress_news_category_tr dataset](https:\/\/huggingface.co\/datasets\/interpress_news_category_tr) but contains less information, OCR errors are reduced, can be easily separated, and can be divided into 10 classes (\"k\u00fclt\u00fcrsanat\", \"ekonomi\", \"siyaset\", \"e\u011fitim\", \"d\u00fcnya\", \"spor\", \"teknoloji\", \"magazin\", \"sa\u011fl\u0131k\", \"g\u00fcndem\") were rearranged.\r\n\r\n@SBrandeis @lhoestq, can you please review this PR?\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1943\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943","id":816160453,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc5ODY5NTk0","number":1943,"title":"Implement Dataset from JSON and JSON Lines","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":11,"created_at":"2021-02-25T07:17:33Z","updated_at":"2021-03-18T09:42:08Z","closed_at":"2021-03-18T09:42:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1943","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1943.patch"},"body":"Implement `Dataset.from_jsonl`.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1942\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1942","id":816037520,"node_id":"MDU6SXNzdWU4MTYwMzc1MjA=","number":1942,"title":"[experiment] missing 
default_experiment-1-0.arrow","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":17,"created_at":"2021-02-25T03:02:15Z","updated_at":"2021-03-01T18:33:31Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"the original report was pretty bad and incomplete - my apologies!\r\n\r\nPlease see the complete version here: https:\/\/github.com\/huggingface\/datasets\/issues\/1942#issuecomment-786336481\r\n\r\n------------\r\n\r\nAs mentioned here https:\/\/github.com\/huggingface\/datasets\/issues\/1939 metrics don't get cached, looking at my local `~\/.cache\/huggingface\/metrics` - there 
are many `*.arrow.lock` files but zero metrics files.\r\n\r\nw\/o the network I get:\r\n```\r\nFileNotFoundError: [Errno 2] No such file or directory: '~\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow\r\n```\r\nthere is just `~\/.cache\/huggingface\/metrics\/sacrebleu\/default\/default_experiment-1-0.arrow.lock`\r\n\r\nI did run the same `run_seq2seq.py` script on the instance with network and it worked just fine, but only the lock file was left behind.\r\n\r\nthis is with master.\r\n\r\nThank you.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1941\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1941","id":815985167,"node_id":"MDU6SXNzdWU4MTU5ODUxNjc=","number":1941,"title":"Loading of FAISS index fails for index_name = 'exact'","user":{"login":"mkserge","id":2992022,"node_id":"MDQ6VXNlcjI5OTIwMjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2992022?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mkserge","html_url":"https:\/\/github.com\/mkserge","followers_url":"https:\/\/api.github.com\/users\/mkserge\/followers","following_url":"https:\/\/api.github.com\/users\/mkserge\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mkserge\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mkserge\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mkserge\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mkserge\/orgs","repos_url":"https:\/\/api.github.com\/users\/mkserge\/repos","events_url":"https:\/\/api.github.com\/users\/mkserge\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mkserge\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.
com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-02-25T01:30:54Z","updated_at":"2021-02-25T14:28:46Z","closed_at":"2021-02-25T14:28:46Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi,\r\n\r\nIt looks like loading of FAISS index now fails when using index_name = 'exact'.\r\n\r\nFor example, from the RAG [model card](https:\/\/huggingface.co\/facebook\/rag-token-nq?fbclid=IwAR3bTfhls5U_t9DqsX2Vzb7NhtRHxJxfQ-uwFT7VuCPMZUM2AdAlKF_qkI8#usage).\r\n\r\nRunning `transformers==4.3.2` and datasets installed from source on latest `master` branch.\r\n\r\n```bash\r\n(venv) sergey_mkrtchyan datasets (master) $ python\r\nPython 3.8.6 (v3.8.6:db455296be, Sep 23 2020, 13:31:39)\r\n[Clang 6.0 (clang-600.0.57)] on darwin\r\nType \"help\", \"copyright\", \"credits\" or \"license\" for more information.\r\n>>> from transformers import RagTokenizer, RagRetriever, RagTokenForGeneration\r\n>>> tokenizer = RagTokenizer.from_pretrained(\"facebook\/rag-token-nq\")\r\n>>> retriever = RagRetriever.from_pretrained(\"facebook\/rag-token-nq\", index_name=\"exact\", use_dummy_dataset=True)\r\nUsing custom data configuration dummy.psgs_w100.nq.no_index-dummy=True,with_index=False\r\nReusing dataset wiki_dpr (\/Users\/sergey_mkrtchyan\/.cache\/huggingface\/datasets\/wiki_dpr\/dummy.psgs_w100.nq.no_index-dummy=True,with_index=False\/0.0.0\/8a97e0f4fa5bc46e179474db6a61b09d5d2419d2911835bd3f91d110c936d8bb)\r\nUsing custom data configuration dummy.psgs_w100.nq.exact-50b6cda57ff32ab4\r\nReusing dataset wiki_dpr (\/Users\/sergey_mkrtchyan\/.cache\/huggingface\/datasets\/wiki_dpr\/dummy.psgs_w100.nq.exact-50b6cda57ff32ab4\/0.0.0\/8a97e0f4fa5bc46e179474db6a61b09d5d2419d2911835bd3f91d110c936d8bb)\r\n 0%| | 0\/10 [00:00\", line 1, in \r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 425, in from_pretrained\r\n return cls(\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 387, in __init__\r\n self.init_retrieval()\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 458, in init_retrieval\r\n self.index.init_index()\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/transformers\/models\/rag\/retrieval_rag.py\", line 284, in init_index\r\n self.dataset = load_dataset(\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/load.py\", line 750, in load_dataset\r\n ds = builder_instance.as_dataset(split=split, ignore_verifications=ignore_verifications, in_memory=keep_in_memory)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/builder.py\", line 734, in as_dataset\r\n datasets = utils.map_nested(\r\n 
File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/utils\/py_utils.py\", line 195, in map_nested\r\n return function(data_struct)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/builder.py\", line 769, in _build_single_dataset\r\n post_processed = self._post_process(ds, resources_paths)\r\n File \"\/Users\/sergey_mkrtchyan\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wiki_dpr\/8a97e0f4fa5bc46e179474db6a61b09d5d2419d2911835bd3f91d110c936d8bb\/wiki_dpr.py\", line 205, in _post_process\r\n dataset.add_faiss_index(\"embeddings\", custom_index=index)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/arrow_dataset.py\", line 2516, in add_faiss_index\r\n super().add_faiss_index(\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/search.py\", line 416, in add_faiss_index\r\n faiss_index.add_vectors(self, column=column, train_size=train_size, faiss_verbose=faiss_verbose)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/huggingface\/datasets\/src\/datasets\/search.py\", line 281, in add_vectors\r\n self.faiss_index.add(vecs)\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/faiss\/__init__.py\", line 104, in replacement_add\r\n self.add_c(n, swig_ptr(x))\r\n File \"\/Users\/sergey_mkrtchyan\/workspace\/cformers\/venv\/lib\/python3.8\/site-packages\/faiss\/swigfaiss.py\", line 3263, in add\r\n return _swigfaiss.IndexHNSW_add(self, n, x)\r\nRuntimeError: Error in virtual void faiss::IndexHNSW::add(faiss::Index::idx_t, const float *) at \/Users\/runner\/work\/faiss-wheels\/faiss-wheels\/faiss\/faiss\/IndexHNSW.cpp:356: Error: 'is_trained' failed\r\n>>>\r\n```\r\n\r\nThe issue seems to be related to the scalar quantization in faiss added in this commit: 8c5220307c33f00e01c3bf7b8. 
Reverting it fixes the issue.\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1940\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1940","id":815770012,"node_id":"MDU6SXNzdWU4MTU3NzAwMTI=","number":1940,"title":"Side effect when filtering data due to `does_function_return_dict` call in `Dataset.map()`","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-24T19:18:56Z","updated_at":"2021-03-23T15:26:49Z","closed_at":"2021-03-23T15:26:49Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi there!\r\n\r\nIn my codebase I have a function to filter rows in a dataset, selecting only a certain number of examples per class. 
The function passes a extra argument to maintain a counter of the number of dataset rows\/examples already selected per each class, which are the ones I want to keep in the end:\r\n\r\n```python\r\n def fill_train_examples_per_class(example, per_class_limit: int, counter: collections.Counter):\r\n label = int(example['label'])\r\n current_counter = counter.get(label, 0)\r\n if current_counter < per_class_limit:\r\n counter[label] = current_counter + 1\r\n return True\r\n return False\r\n```\r\n\r\nAt some point I invoke it through the `Dataset.filter()` method in the `arrow_dataset.py` module like this:\r\n\r\n```python\r\n...\r\nkwargs = {\"per_class_limit\": train_examples_per_class_limit, \"counter\": Counter()}\r\ndatasets['train'] = datasets['train'].filter(fill_train_examples_per_class, num_proc=1, fn_kwargs=kwargs)\r\n...\r\n```\r\n\r\nThe problem is that, passing a stateful container (the counter,) provokes a side effect in the new filtered dataset obtained. This is due to the fact that at some point in `filter()`, the `map()`'s function `does_function_return_dict` is invoked in line [1290](https:\/\/github.com\/huggingface\/datasets\/blob\/96578adface7e4bc1f3e8bafbac920d72ca1ca60\/src\/datasets\/arrow_dataset.py#L1290). \r\n\r\nWhen this occurs, the state of the counter is initially modified by the effects of the function call on the 1 or 2 rows selected in lines 1288 and 1289 of the same file (which are marked as `test_inputs` & `test_indices` respectively in lines 1288 and 1289. This happens out of the control of the user (which for example can't reset the state of the counter before continuing the execution,) provoking in the end an undesired side effect in the results obtained. \r\n\r\nIn my case, the resulting dataset -despite of the counter results are ok- lacks an instance of the classes 0 and 1 (which happen to be the classes of the first two examples of my dataset.) The rest of the classes I have in my dataset, contain the right number of examples as they were not affected by the effects of `does_function_return_dict` call.\r\n\r\nI've debugged my code extensively and made a workaround myself hardcoding the necessary stuff (basically putting `update_data=True` in line 1290,) and then I obtain the results I expected without the side effect.\r\n\r\nIs there a way to avoid that call to `does_function_return_dict` in map()'s line 1290 ? (e.g. 
extracting the required information that `does_function_return_dict` returns without making the testing calls to the user function on dataset rows 0 & 1) \r\n\r\nThanks in advance,\r\n\r\nFrancisco Perez-Sorrosal\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1939\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1939","id":815680510,"node_id":"MDU6SXNzdWU4MTU2ODA1MTA=","number":1939,"title":"[firewalled env] OFFLINE mode","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","
repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":7,"created_at":"2021-02-24T17:13:42Z","updated_at":"2021-03-05T05:09:54Z","closed_at":"2021-03-05T05:09:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"This issue comes from a need to be able to run `datasets` in a firewalled env, which currently makes the software hang until it times out, as it's unable to complete the network calls.\r\n\r\nI propose the following approach to solving this problem, using the example of `run_seq2seq.py` as a sample program. There are 2 possible ways to going about it.\r\n\r\n## 1. Manual\r\n\r\nmanually prepare data and metrics files, that is transfer to the firewalled instance the dataset and the metrics and run:\r\n\r\n```\r\nDATASETS_OFFLINE=1 run_seq2seq.py --train_file xyz.csv --validation_file xyz.csv ...\r\n```\r\n\r\n`datasets` must not make any network calls and if there is a logic to do that and something is missing it should assert that this or that action requires network and therefore it can't proceed.\r\n\r\n## 2. Automatic\r\n\r\nIn some clouds one can prepare a datastorage ahead of time with a normal networked environment but which doesn't have gpus and then one switches to the gpu instance which is firewalled, but it can access all the cached data. This is the ideal situation, since in this scenario we don't have to do anything manually, but simply run the same application twice:\r\n\r\n1. on the non-firewalled instance:\r\n```\r\nrun_seq2seq.py --dataset_name wmt16 --dataset_config ro-en ...\r\n```\r\n\r\nwhich should download and cached everything.\r\n\r\n2. and then immediately after on the firewalled instance, which shares the same filesystem\r\n```\r\nDATASETS_OFFLINE=1 run_seq2seq.py --dataset_name wmt16 --dataset_config ro-en ...\r\n```\r\n\r\nand the metrics and datasets should be cached by the invocation number 1 and any network calls be skipped and if the logic is missing data it should assert and not try to fetch any data from online.\r\n\r\n## Common Issues\r\n\r\n1. for example currently `datasets` tries to look up online datasets if the files contain json or csv, despite the paths already provided\r\n\r\n```\r\n if dataset and path in _PACKAGED_DATASETS_MODULES:\r\n```\r\n\r\n2. it has an issue with metrics. e.g. I had to manually copy `rouge\/rouge.py` from the `datasets` repo to the current dir - or it was hanging.\r\n\r\nI had to comment out `head_hf_s3(...)` calls to make things work. 
So all those `try: head_hf_s3(...)` shouldn't be tried with `DATASETS_OFFLINE=1`\r\n\r\nHere is the corresponding issue for `transformers`: https:\/\/github.com\/huggingface\/transformers\/issues\/10379\r\n\r\nThanks.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1938\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938","id":815647774,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc5NDQyNDkw","number":1938,"title":"Disallow ClassLabel with no names","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-24T16:37:57Z","updated_at":"2021-02-25T11:27:29Z","closed_at":"2021-02-25T11:27:29Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1938","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1938.patch"},"body":"It was possible to create a ClassLabel without specifying the names or the number of classes.\r\nThis was causing silent issues as in #1936 and breaking the conversion methods str2int and int2str.\r\n\r\ncc @justin-yan ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1937\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1937","id":815163943,"node_id":"MDU6SXNzdWU4MTUxNjM5NDM=","number":1937,"title":"CommonGen dataset page shows an error OSError: [Errno 28] No space left on 
device","user":{"login":"yuchenlin","id":10104354,"node_id":"MDQ6VXNlcjEwMTA0MzU0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10104354?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yuchenlin","html_url":"https:\/\/github.com\/yuchenlin","followers_url":"https:\/\/api.github.com\/users\/yuchenlin\/followers","following_url":"https:\/\/api.github.com\/users\/yuchenlin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yuchenlin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yuchenlin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yuchenlin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yuchenlin\/orgs","repos_url":"https:\/\/api.github.com\/users\/yuchenlin\/repos","events_url":"https:\/\/api.github.com\/users\/yuchenlin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yuchenlin\/received_events","type":"User","site_admin":false},"labels":[{"id":2107841032,"node_id":"MDU6TGFiZWwyMTA3ODQxMDMy","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/nlp-viewer","name":"nlp-viewer","color":"94203D","default":false,"description":""}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-24T06:47:33Z","updated_at":"2021-02-26T11:10:06Z","closed_at":"2021-02-26T11:10:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"The page of the CommonGen data https:\/\/huggingface.co\/datasets\/viewer\/?dataset=common_gen shows \r\n![image](https:\/\/user-images.githubusercontent.com\/10104354\/108959311-1865e600-7629-11eb-868c-cf4cb27034ea.png)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1936\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936","id":814726512,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4NjY3NTQ4","number":1936,"title":"[WIP] Adding Support for Reading Pandas 
Category","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-02-23T18:32:54Z","updated_at":"2021-03-09T07:09:05Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1936","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1936.patch"},"body":"@lhoestq - continuing our conversation from https:\/\/github.com\/huggingface\/datasets\/issues\/1906#issuecomment-784247014\r\n\r\nThe goal of this PR is to support `Dataset.from_pandas(df)` where the dataframe contains a Category.\r\n\r\nJust the 4 line change below actually does seem to work:\r\n\r\n```\r\n>>> from datasets import Dataset\r\n>>> import pandas as pd\r\n>>> df = pd.DataFrame(pd.Series([\"a\", \"b\", \"c\", \"a\"], dtype=\"category\"))\r\n>>> ds = Dataset.from_pandas(df)\r\n>>> ds.to_pandas()\r\n 0\r\n0 a\r\n1 b\r\n2 c\r\n3 a\r\n>>> ds.to_pandas().dtypes\r\n0 category\r\ndtype: object\r\n```\r\n\r\nsave_to_disk, etc. all seem to work as well. The main things that are theoretically \"incorrect\" if we leave this are:\r\n\r\n```\r\n>>> ds.features.type\r\nStructType(struct<0: int64>)\r\n```\r\nthere are a decent number of references to this property in the library, but I can't find anything that seems to actually break as a result of this being int64 vs. dictionary? I think the gist of my question is: a) do we *need* to change the dtype of Classlabel and have get_nested_type return a pyarrow.DictionaryType instead of int64? and b) do you *want* it to change? The biggest challenge I see to implementing this correctly is that the data will need to be passed in along with the pyarrow schema when instantiating the Classlabel (I *think* this is unavoidable, since the type itself doesn't contain the actual label values) which could be a fairly intrusive change - e.g. `from_arrow_schema`'s interface would need to change to include optional arrow data? 
Once we start going down this path of modifying the public interfaces I am admittedly feeling a little bit outside of my comfort zone\r\n\r\nAdditionally I think `int2str`, `str2int`, and `encode_example` probably won't work - but I can't find any usages of them in the library itself.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1935\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935","id":814623827,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4NTgyMzk1","number":1935,"title":"add CoVoST2","user":{"login":"patil-suraj","id":27137566,"node_id":"MDQ6VXNlcjI3MTM3NTY2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/27137566?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patil-suraj","html_url":"https:\/\/github.com\/patil-suraj","followers_url":"https:\/\/api.github.com\/users\/patil-suraj\/followers","following_url":"https:\/\/api.github.com\/users\/patil-suraj\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patil-suraj\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patil-suraj\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patil-suraj\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patil-suraj\/orgs","repos_url":"https:\/\/api.github.com\/users\/patil-suraj\/repos","events_url":"https:\/\/api.github.com\/users\/patil-suraj\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patil-suraj\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-23T16:28:16Z","updated_at":"2021-02-24T18:09:32Z","closed_at":"2021-02-24T18:05:09Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1935","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1935.patch"},"body":"This PR adds the CoVoST2 dataset for speech translation and ASR.\r\nhttps:\/\/github.com\/facebookresearch\/covost#covost-2\r\n\r\nThe dataset requires manual download as the download page requests an email address and the URLs are temporary.\r\n\r\nThe dummy data is a bit bigger because of the mp3 files and 36 configs.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1934\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1934","id":814437190,"node_id":"MDU6SXNzdWU4MTQ0MzcxOTA=","number":1934,"title":"Add Stanford Sentiment Treebank 
(SST)","user":{"login":"patpizio","id":15801338,"node_id":"MDQ6VXNlcjE1ODAxMzM4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15801338?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/patpizio","html_url":"https:\/\/github.com\/patpizio","followers_url":"https:\/\/api.github.com\/users\/patpizio\/followers","following_url":"https:\/\/api.github.com\/users\/patpizio\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/patpizio\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/patpizio\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/patpizio\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/patpizio\/orgs","repos_url":"https:\/\/api.github.com\/users\/patpizio\/repos","events_url":"https:\/\/api.github.com\/users\/patpizio\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/patpizio\/received_events","type":"User","site_admin":false},"labels":[{"id":2067376369,"node_id":"MDU6TGFiZWwyMDY3Mzc2MzY5","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20request","name":"dataset request","color":"e99695","default":false,"description":"Requesting to add a new dataset"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-23T12:53:16Z","updated_at":"2021-03-18T17:51:44Z","closed_at":"2021-03-18T17:51:44Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I am going to add SST:\r\n\r\n- **Name:** The Stanford Sentiment Treebank\r\n- **Description:** The first corpus with fully labeled parse trees that allows for a complete analysis of the compositional effects of sentiment in language\r\n- **Paper:** [Recursive Deep Models for Semantic Compositionality Over a Sentiment Treebank](https:\/\/nlp.stanford.edu\/~socherr\/EMNLP2013_RNTN.pdf)\r\n- **Data:** https:\/\/nlp.stanford.edu\/sentiment\/index.html\r\n- **Motivation:** Already requested in #353, SST is a popular dataset for Sentiment Classification\r\n\r\nWhat's the difference with the [_SST-2_](https:\/\/huggingface.co\/datasets\/viewer\/?dataset=glue&config=sst2) dataset included in GLUE? Essentially, SST-2 is a version of SST where:\r\n- the labels were mapped from real numbers in [0.0, 1.0] to a binary label: {0, 1}\r\n- the labels of the *sub-sentences* were included only in the training set\r\n- the labels in the test set are obfuscated\r\n\r\nSo there is a lot more information in the original SST. The tricky bit is, the data is scattered into many text files and, for one in particular, I couldn't find the original encoding ([*but I'm not the only one*](https:\/\/groups.google.com\/g\/word2vec-toolkit\/c\/QIUjLw6RqFk\/m\/_iEeyt428wkJ) \ud83c\udfb5). The only solution I found was to manually replace all the \u00e8, \u00eb, \u00e7 and so on into an `utf-8` copy of the text file. I uploaded the result in my Dropbox and I am using that as the main repo for the dataset.\r\n\r\nAlso, the _sub-sentences_ are built at run-time from the information encoded in several text files, so generating the examples is a bit more cumbersome than usual. Luckily, the dataset is not enormous.\r\n\r\nI plan to divide the dataset in 2 configs: one with just whole sentences with their labels, the other with sentences _and their sub-sentences_ with their labels. Each config will be split in train, validation and test. 
Hopefully this makes sense, we may discuss it in the PR I'm going to submit.\r\n\r\n\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1933\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933","id":814335846,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MzQwMzk3","number":1933,"title":"Use arrow ipc file format","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-23T10:38:24Z","updated_at":"2021-02-23T10:38:24Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1933","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1933.patch"},"body":"According to the [documentation](https:\/\/arrow.apache.org\/docs\/format\/Columnar.html?highlight=arrow1#ipc-file-format), it's identical to the streaming format except that it contains the memory offsets of each sample:\r\n\r\n> We define a \u201cfile format\u201d supporting random access that is build with the stream format. The file starts and ends with a magic string ARROW1 (plus padding). What follows in the file is identical to the stream format. At the end of the file, we write a footer containing a redundant copy of the schema (which is a part of the streaming format) plus memory offsets and sizes for each of the data blocks in the file. This enables random access any record batch in the file. See File.fbs for the precise details of the file footer.\r\n\r\nSince it stores more metadata regarding the positions of the examples in the file, it should enable better example retrieval performances. However from the discussion in https:\/\/github.com\/huggingface\/datasets\/issues\/1803 it looks like it's not the case unfortunately. 
Maybe in the future this will allow speed gains.\r\n\r\nI think it's still a good idea to start using it anyway for these reasons:\r\n- in the future we may have speed gains\r\n- it contains the arrow streaming format data\r\n- it's compatible with the pyarrow Dataset implementation (it allows to load remote dataframes for example) if we want to use it in the future\r\n- it's also the format used by arrow feather if we want to use it in the future\r\n- it's roughly the same size as the streaming format\r\n- it's easy to have backward compatibility with the streaming format\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1932\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932","id":814326116,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MzMyMTQy","number":1932,"title":"Fix builder config creation with data_dir","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-23T10:26:02Z","updated_at":"2021-02-23T10:45:28Z","closed_at":"2021-02-23T10:45:27Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1932","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1932.patch"},"body":"The data_dir parameter wasn't taken into account to create the config_id, therefore the resulting builder config was considered not custom. However a builder config that is non-custom must not have a name that collides with the predefined builder config names. Therefore it resulted in a `ValueError(\"Cannot name a custom BuilderConfig the same as an available...\")`\r\n\r\nI fixed that by commenting the line that used to ignore the data_dir when creating the config.\r\n\r\nIt was previously ignored before the introduction of config id because we didn't want to change the config name. 
Now it's fine to take it into account for the config id.\r\n\r\nNow creating a config with a data_dir works again @patrickvonplaten ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1931\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931","id":814225074,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MjQ4NTA5","number":1931,"title":"add m_lama (multilingual lama) dataset","user":{"login":"pdufter","id":13961899,"node_id":"MDQ6VXNlcjEzOTYxODk5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/13961899?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pdufter","html_url":"https:\/\/github.com\/pdufter","followers_url":"https:\/\/api.github.com\/users\/pdufter\/followers","following_url":"https:\/\/api.github.com\/users\/pdufter\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pdufter\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pdufter\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pdufter\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pdufter\/orgs","repos_url":"https:\/\/api.github.com\/users\/pdufter\/repos","events_url":"https:\/\/api.github.com\/users\/pdufter\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pdufter\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-23T08:11:57Z","updated_at":"2021-03-01T10:01:03Z","closed_at":"2021-03-01T10:01:03Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1931","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1931.patch"},"body":"Add a multilingual (machine translated and automatically generated) version of the LAMA benchmark. 
For details see the paper https:\/\/arxiv.org\/pdf\/2102.00894.pdf ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1930\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930","id":814055198,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc4MTAwNzI0","number":1930,"title":"updated the wino_bias dataset","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-23T03:07:40Z","updated_at":"2021-04-07T15:24:56Z","closed_at":"2021-04-07T15:24:56Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1930","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1930.patch"},"body":"Updated the wino_bias.py script.\r\n- updated the data_url\r\n- added different configurations for different data splits\r\n- added the coreference_cluster to the data features","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1929\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929","id":813929669,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3OTk1MTE4","number":1929,"title":"Improve typing and style and fix some 
inconsistencies","user":{"login":"mariosasko","id":47462742,"node_id":"MDQ6VXNlcjQ3NDYyNzQy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/47462742?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mariosasko","html_url":"https:\/\/github.com\/mariosasko","followers_url":"https:\/\/api.github.com\/users\/mariosasko\/followers","following_url":"https:\/\/api.github.com\/users\/mariosasko\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mariosasko\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mariosasko\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mariosasko\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mariosasko\/orgs","repos_url":"https:\/\/api.github.com\/users\/mariosasko\/repos","events_url":"https:\/\/api.github.com\/users\/mariosasko\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mariosasko\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-22T22:47:41Z","updated_at":"2021-02-24T16:16:14Z","closed_at":"2021-02-24T14:03:54Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1929","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1929.patch"},"body":"This PR:\r\n* improves typing (mostly more consistent use of `typing.Optional`)\r\n* `DatasetDict.cleanup_cache_files` now correctly returns a dict \r\n* replaces `dict()` with the corresponding literal\r\n* uses `dict_to_copy.copy()` instead of `dict(dict_to_copy)` for shallow copying","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1928\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928","id":813793434,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3ODgyMDM4","number":1928,"title":"Updating old 
cards","user":{"login":"mcmillanmajora","id":26722925,"node_id":"MDQ6VXNlcjI2NzIyOTI1","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26722925?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/mcmillanmajora","html_url":"https:\/\/github.com\/mcmillanmajora","followers_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/followers","following_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/orgs","repos_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/repos","events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/mcmillanmajora\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T19:26:04Z","updated_at":"2021-02-23T18:19:25Z","closed_at":"2021-02-23T18:19:25Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1928","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1928.patch"},"body":"Updated the cards for [Allocine](https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/updating-old-cards\/datasets\/allocine), [CNN\/DailyMail](https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/updating-old-cards\/datasets\/cnn_dailymail), and [SNLI](https:\/\/github.com\/mcmillanmajora\/datasets\/tree\/updating-old-cards\/datasets\/snli). For the most part, the information was just rearranged or rephrased, but the social impact statements are new. 
","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1927\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927","id":813768935,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3ODYxODM5","number":1927,"title":"Update README.md","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T18:51:34Z","updated_at":"2021-02-23T10:52:45Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1927","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1927.patch"},"body":"Updated the info for the wino_bias dataset.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1926\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926","id":813607994,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3NzI4Mjgy","number":1926,"title":"Fix: Wiki_dpr - add missing scalar 
quantizer","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T15:32:05Z","updated_at":"2021-02-22T15:49:54Z","closed_at":"2021-02-22T15:49:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1926","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1926.patch"},"body":"All the prebuilt wiki_dpr indexes already use SQ8, I forgot to update the wiki_dpr script after building them. Now it's finally done.\r\n\r\nThe scalar quantizer SQ8 doesn't reduce the performance of the index as shown in retrieval experiments on RAG.\r\nThe quantizer reduces the size of the index a lot but increases index building time.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1925\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925","id":813600902,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3NzIyMzc3","number":1925,"title":"Fix: Wiki_dpr - fix when with_embeddings is False or index_name is 
\"no_index\"","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-22T15:23:46Z","updated_at":"2021-02-25T01:33:48Z","closed_at":"2021-02-22T15:36:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1925","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1925.patch"},"body":"Fix the bugs noticed in #1915 \r\n\r\nThere was a bug when `with_embeddings=False` where the configuration name was the same as if `with_embeddings=True`, which led the dataset builder to do bad verifications (for example it used to expect to download the embeddings for `with_embeddings=False`).\r\n\r\nAnother issue was that setting `index_name=\"no_index\"` didn't set `with_index` to False.\r\n\r\nI fixed both of them and added dummy data for those configurations for testing.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1924\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1924","id":813599733,"node_id":"MDU6SXNzdWU4MTM1OTk3MzM=","number":1924,"title":"Anonymous Dataset Addition (i.e Anonymous 
PR?)","user":{"login":"PierreColombo","id":22492839,"node_id":"MDQ6VXNlcjIyNDkyODM5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22492839?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/PierreColombo","html_url":"https:\/\/github.com\/PierreColombo","followers_url":"https:\/\/api.github.com\/users\/PierreColombo\/followers","following_url":"https:\/\/api.github.com\/users\/PierreColombo\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/PierreColombo\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/PierreColombo\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/PierreColombo\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/PierreColombo\/orgs","repos_url":"https:\/\/api.github.com\/users\/PierreColombo\/repos","events_url":"https:\/\/api.github.com\/users\/PierreColombo\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/PierreColombo\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-22T15:22:30Z","updated_at":"2021-02-23T18:28:10Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hello,\r\nThanks a lot for your librairy.\r\nWe plan to submit a paper on OpenReview using the Anonymous setting. Is it possible to add a new dataset without breaking the anonimity, with a link to the paper ? \r\nCheers \r\n@eusip","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1923\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923","id":813363472,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3NTI0MTU0","number":1923,"title":"Fix save_to_disk with relative 
path","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-22T10:27:19Z","updated_at":"2021-02-22T11:22:44Z","closed_at":"2021-02-22T11:22:43Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1923","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1923.patch"},"body":"As noticed in #1919 and #1920 the target directory was not created using `makedirs` so saving to it raises `FileNotFoundError`. For absolute paths it works but not for the good reason. This is because the target path was the same as the temporary path where in-memory data are written as an intermediary step.\r\n\r\nI added the `makedirs` call using `fs.makedirs` in order to support remote filesystems.\r\nI also fixed the issue with the target path being the temporary path.\r\n\r\nI added a test case for relative paths as well for save_to_disk.\r\n\r\nThanks to @M-Salti for reporting and investigating","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1922\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1922","id":813140806,"node_id":"MDU6SXNzdWU4MTMxNDA4MDY=","number":1922,"title":"How to update the \"wino_bias\" 
dataset","user":{"login":"JieyuZhao","id":22306304,"node_id":"MDQ6VXNlcjIyMzA2MzA0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/22306304?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/JieyuZhao","html_url":"https:\/\/github.com\/JieyuZhao","followers_url":"https:\/\/api.github.com\/users\/JieyuZhao\/followers","following_url":"https:\/\/api.github.com\/users\/JieyuZhao\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/JieyuZhao\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/JieyuZhao\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/JieyuZhao\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/JieyuZhao\/orgs","repos_url":"https:\/\/api.github.com\/users\/JieyuZhao\/repos","events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/JieyuZhao\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-22T05:39:39Z","updated_at":"2021-02-22T10:35:59Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Hi all,\r\n\r\nThanks for the efforts to collect all the datasets! But I think there is a problem with the wino_bias dataset. The current link is not correct. How can I update that?\r\n\r\nThanks!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1921\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921","id":812716042,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc3MDEzMDM4","number":1921,"title":"Standardizing datasets 
dtypes","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-20T22:04:01Z","updated_at":"2021-02-22T09:44:10Z","closed_at":"2021-02-22T09:44:10Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1921","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1921.patch"},"body":"This PR follows up on discussion in #1900 to have an explicit set of basic dtypes for datasets.\r\n\r\nThis moves away from str(pyarrow.DataType) as the method of choice for creating dtypes, favoring an explicit mapping to a list of supported Value dtypes.\r\n\r\nI believe in practice this should be backward compatible, since anyone previously using Value() would only have been able to use dtypes that had an identically named pyarrow factory function, which are all explicitly supported here, with `float32` and `float64` acting as the official datasets dtypes, which resolves the tension between `double` being the pyarrow dtype and `float64` being the pyarrow type factory function.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1920\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920","id":812628220,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2OTQ5NzI2","number":1920,"title":"Fix save_to_disk 
issue","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-20T14:22:39Z","updated_at":"2021-02-22T10:30:11Z","closed_at":"2021-02-22T10:30:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1920","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1920.patch"},"body":"Fixes #1919 \r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1919\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1919","id":812626872,"node_id":"MDU6SXNzdWU4MTI2MjY4NzI=","number":1919,"title":"Failure to save with save_to_disk","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-20T14:18:10Z","updated_at":"2021-03-03T17:40:27Z","closed_at":"2021-03-03T17:40:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"When I try to save a dataset locally using the 
`save_to_disk` method I get the error:\r\n\r\n```bash\r\nFileNotFoundError: [Errno 2] No such file or directory: '\/content\/squad\/train\/squad-train.arrow'\r\n```\r\n\r\nTo replicate:\r\n\r\n1. Install `datasets` from master\r\n2. Run this code:\r\n\r\n ```python\r\n from datasets import load_dataset\r\n squad = load_dataset(\"squad\") # or any other dataset\r\n squad.save_to_disk(\"squad\") # error here\r\n ```\r\n\r\nThe problem is that the method is not creating a directory with the name `dataset_path` for saving the dataset in (i.e. it's not creating the *train* and *validation* directories in this case). After creating the directory the problem resolves.\r\nI'll open a PR soon doing that and linking this issue.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1918\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918","id":812541510,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2ODg2OTQ0","number":1918,"title":"Fix QA4MRE download URLs","user":{"login":"M-Salti","id":9285264,"node_id":"MDQ6VXNlcjkyODUyNjQ=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/9285264?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/M-Salti","html_url":"https:\/\/github.com\/M-Salti","followers_url":"https:\/\/api.github.com\/users\/M-Salti\/followers","following_url":"https:\/\/api.github.com\/users\/M-Salti\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/M-Salti\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/M-Salti\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/M-Salti\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/M-Salti\/orgs","repos_url":"https:\/\/api.github.com\/users\/M-Salti\/repos","events_url":"https:\/\/api.github.com\/users\/M-Salti\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/M-Salti\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-20T07:32:17Z","updated_at":"2021-02-22T13:35:06Z","closed_at":"2021-02-22T13:35:06Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1918","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1918.patch"},"body":"The URLs in the `dataset_infos` and `README` are correct, only the ones in the download script needed updating.","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1917\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1917","id":812390178,"node_id":"MDU6SXNzdWU4MTIzOTAxNzg=","number":1917,"title":"UnicodeDecodeError: windows 10 machine","user":{"login":"yosiasz","id":900951,"node_id":"MDQ6VXNlcjkwMDk1MQ==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/900951?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/yosiasz","html_url":"https:\/\/github.com\/yosiasz","followers_url":"https:\/\/api.github.com\/users\/yosiasz\/followers","following_url":"https:\/\/api.github.com\/users\/yosiasz\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/yosiasz\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/yosiasz\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/yosiasz\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/yosiasz\/orgs","repos_url":"https:\/\/api.github.com\/users\/yosiasz\/repos","events_url":"https:\/\/api.github.com\/users\/yosiasz\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/yosiasz\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-19T22:13:05Z","updated_at":"2021-02-19T22:41:11Z","closed_at":"2021-02-19T22:40:28Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Windows 10\r\nPhp 3.6.8\r\n\r\nwhen running\r\n\r\n```\r\nimport datasets\r\n\r\noscar_am = datasets.load_dataset(\"oscar\", \"unshuffled_deduplicated_am\")\r\nprint(oscar_am[\"train\"][0])\r\n```\r\nI get the following error\r\n\r\n```\r\nfile \"C:\\PYTHON\\3.6.8\\lib\\encodings\\cp1252.py\", line 23, in decode\r\n return codecs.charmap_decode(input,self.errors,decoding_table)[0]\r\nUnicodeDecodeError: 'charmap' codec can't decode byte 0x9d in position 58: character maps to \r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1916\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916","id":812291984,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NjgwNjY5","number":1916,"title":"Remove unused py_utils 
objects","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-19T19:51:25Z","updated_at":"2021-02-22T14:56:56Z","closed_at":"2021-02-22T13:32:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1916","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1916.patch"},"body":"Remove unused\/unnecessary py_utils functions\/classes.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1915\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1915","id":812229654,"node_id":"MDU6SXNzdWU4MTIyMjk2NTQ=","number":1915,"title":"Unable to download 
`wiki_dpr`","user":{"login":"nitarakad","id":18504534,"node_id":"MDQ6VXNlcjE4NTA0NTM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/18504534?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/nitarakad","html_url":"https:\/\/github.com\/nitarakad","followers_url":"https:\/\/api.github.com\/users\/nitarakad\/followers","following_url":"https:\/\/api.github.com\/users\/nitarakad\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/nitarakad\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/nitarakad\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/nitarakad\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/nitarakad\/orgs","repos_url":"https:\/\/api.github.com\/users\/nitarakad\/repos","events_url":"https:\/\/api.github.com\/users\/nitarakad\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/nitarakad\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":3,"created_at":"2021-02-19T18:11:32Z","updated_at":"2021-03-03T17:40:48Z","closed_at":"2021-03-03T17:40:48Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I am trying to download the `wiki_dpr` dataset. Specifically, I want to download `psgs_w100.multiset.no_index` with no embeddings\/no index. 
In order to do so, I ran:\r\n\r\n`curr_dataset = load_dataset(\"wiki_dpr\", embeddings_name=\"multiset\", index_name=\"no_index\")` \r\n\r\nHowever, I got the following error:\r\n`datasets.utils.info_utils.UnexpectedDownloadedFile: {'embeddings_index'}`\r\n\r\nI tried adding in flags `with_embeddings=False` and `with_index=False`:\r\n\r\n`curr_dataset = load_dataset(\"wiki_dpr\", with_embeddings=False, with_index=False, embeddings_name=\"multiset\", index_name=\"no_index\")`\r\n\r\nBut I got the following error:\r\n`raise ExpectedMoreDownloadedFiles(str(set(expected_checksums) - set(recorded_checksums)))\r\ndatasets.utils.info_utils.ExpectedMoreDownloadedFiles: {\u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_5\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_15\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_30\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_36\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_18\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_41\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_13\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_48\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_10\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_23\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_14\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_34\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_43\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_40\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_47\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_3\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_24\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_7\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_33\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_46\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_42\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_27\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_29\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_26\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_22\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_4\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_20\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_39\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_6\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_16\u2019, 
\u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_8\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_35\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_49\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_17\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_25\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_0\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_38\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_12\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_44\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_1\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_32\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_19\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_31\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_37\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_9\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_11\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_21\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_28\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_45\u2019, \u2018https:\/\/dl.fbaipublicfiles.com\/rag\/rag_multiset_embeddings\/wiki_passages_2\u2019}`\r\n\r\nIs there anything else I need to set to download the dataset?\r\n\r\n**UPDATE**: just running `curr_dataset = load_dataset(\"wiki_dpr\", with_embeddings=False, with_index=False)` gives me the same error.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1914\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914","id":812149201,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NTYyNTkz","number":1914,"title":"Fix logging imports and make all datasets use library 
logger","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-19T16:12:34Z","updated_at":"2021-02-21T19:48:03Z","closed_at":"2021-02-21T19:48:03Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1914","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1914.patch"},"body":"Fix library relative logging imports and make all datasets use library logger.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1913\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913","id":812127307,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NTQ0NjQw","number":1913,"title":"Add keep_linebreaks parameter to text 
loader","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-19T15:43:45Z","updated_at":"2021-02-19T18:36:12Z","closed_at":"2021-02-19T18:36:11Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1913","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1913.patch"},"body":"As asked in #870 and https:\/\/github.com\/huggingface\/transformers\/issues\/10269 there should be a parameter to keep the linebreaks when loading a text dataset.\r\ncc @sgugger @jncasey","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1912\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912","id":812034140,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2NDY2ODQx","number":1912,"title":"Update: WMT - use mirror 
links","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-19T13:42:34Z","updated_at":"2021-02-24T13:44:53Z","closed_at":"2021-02-24T13:44:53Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1912","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1912.patch"},"body":"As asked in #1892 I created mirrors of the data hosted on statmt.org and updated the wmt scripts.\r\nNow downloading the wmt datasets is blazing fast :)\r\n\r\ncc @stas00 @patrickvonplaten ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1911\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1911","id":812009956,"node_id":"MDU6SXNzdWU4MTIwMDk5NTY=","number":1911,"title":"Saving processed dataset running 
infinitely","user":{"login":"ayubSubhaniya","id":20911334,"node_id":"MDQ6VXNlcjIwOTExMzM0","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/20911334?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ayubSubhaniya","html_url":"https:\/\/github.com\/ayubSubhaniya","followers_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/followers","following_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/orgs","repos_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/repos","events_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ayubSubhaniya\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":6,"created_at":"2021-02-19T13:09:19Z","updated_at":"2021-02-23T07:34:44Z","closed_at":null,"author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"I have a text dataset of size 220M.\r\n\r\nFor pre-processing, I need to tokenize this and filter rows with the large sequence.\r\n\r\nMy tokenization took roughly 3hrs. I used map() with batch size 1024 and multi-process with 96 processes.\r\n\r\nfilter() function was way to slow, so I used a hack to use pyarrow filter table function, which is damm fast. Mentioned [here](https:\/\/github.com\/huggingface\/datasets\/issues\/1796)\r\n\r\n```dataset._data = dataset._data.filter(...)```\r\nIt took 1 hr for the filter.\r\n\r\nThen i use `save_to_disk()` on processed dataset and it is running forever.\r\n\r\nI have been waiting since 8 hrs, it has not written a single byte. \r\n\r\nInfact it has actually read from disk more than 100GB, screenshot below shows the stats using `iotop`. 
\r\nSecond process is the one.\r\n\"Screenshot\r\n\r\n\r\nI am not able to figure out, whether this is some issue with dataset library or that it is due to my hack for filter() function.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1910\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910","id":811697108,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc2MTg0MDQ3","number":1910,"title":"Adding CoNLLpp dataset.","user":{"login":"ZihanWangKi","id":21319243,"node_id":"MDQ6VXNlcjIxMzE5MjQz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/21319243?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ZihanWangKi","html_url":"https:\/\/github.com\/ZihanWangKi","followers_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/followers","following_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/orgs","repos_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/repos","events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ZihanWangKi\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-19T05:12:30Z","updated_at":"2021-03-04T22:02:47Z","closed_at":"2021-03-04T22:02:47Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1910","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1910.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1907\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1907","id":811520569,"node_id":"MDU6SXNzdWU4MTE1MjA1Njk=","number":1907,"title":"DBPedia14 Dataset Checksum 
bug?","user":{"login":"francisco-perez-sorrosal","id":918006,"node_id":"MDQ6VXNlcjkxODAwNg==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/918006?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal","html_url":"https:\/\/github.com\/francisco-perez-sorrosal","followers_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/followers","following_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/orgs","repos_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/repos","events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/francisco-perez-sorrosal\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-18T22:25:48Z","updated_at":"2021-02-22T23:22:05Z","closed_at":"2021-02-22T23:22:04Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi there!!!\r\n\r\nI've been using successfully the DBPedia dataset (https:\/\/huggingface.co\/datasets\/dbpedia_14) with my codebase in the last couple of weeks, but in the last couple of days now I get this error:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \".\/conditional_classification\/basic_pipeline.py\", line 178, in \r\n main()\r\n File \".\/conditional_classification\/basic_pipeline.py\", line 128, in main\r\n corpus.load_data(limit_train_examples_per_class=args.data_args.train_examples_per_class,\r\n File \"\/home\/fp\/dev\/conditional_classification\/conditional_classification\/datasets_base.py\", line 83, in load_data\r\n datasets = load_dataset(self.name, split=dataset_split)\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/load.py\", line 609, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 526, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/builder.py\", line 586, in _download_and_prepare\r\n verify_checksums(\r\n File \"\/home\/fp\/anaconda3\/envs\/conditional\/lib\/python3.8\/site-packages\/datasets\/utils\/info_utils.py\", line 39, in verify_checksums\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/drive.google.com\/uc?export=download&id=0Bz8a_Dbh9QhbQ2Vic1kxMmZZQ1k']\r\n```\r\n\r\nI've seen this has happened before in other datasets as reported in #537.\r\n\r\nI've tried clearing my cache and call again `load_dataset` but still is not working. My same codebase is successfully downloading and using other datasets (e.g. AGNews) without any problem, so I guess something has happened specifically to the DBPedia dataset in the last few days. \r\n\r\nCan you please check if there's a problem with the checksums? 
\r\n\r\nOr this is related to any other stuff? I've seen that the path in the cache for the dataset is `\/home\/fp\/.cache\/huggingface\/datasets\/d_bpedia14\/dbpedia_14\/2.0.0\/a70413e39e7a716afd0e90c9e53cb053691f56f9ef5fe317bd07f2c368e8e897...` and includes `d_bpedia14` instead maybe of `dbpedia_14`. Was this maybe a bug introduced recently?\r\n\r\nThanks!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1906\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1906","id":811405274,"node_id":"MDU6SXNzdWU4MTE0MDUyNzQ=","number":1906,"title":"Feature Request: Support for Pandas `Categorical`","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or request"},{"id":2067400324,"node_id":"MDU6TGFiZWwyMDY3NDAwMzI0","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/generic%20discussion","name":"generic discussion","color":"c5def5","default":false,"description":"Generic discussion on the library"}],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-18T19:46:05Z","updated_at":"2021-02-23T14:38:50Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"```\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\nimport pyarrow\r\n\r\ndf = pd.DataFrame(pd.Series([\"a\", \"b\", \"c\", \"a\"], dtype=\"category\"))\r\npyarrow.Table.from_pandas(df)\r\nDataset.from_pandas(df)\r\n# Throws NotImplementedError\r\n# TODO(thom) this will need access to the dictionary as well (for labels). I.e. to the py_table\r\n```\r\n\r\nI'm curious if https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L796 could be built out in a way similar to `Sequence`?\r\n\r\ne.g. 
a `Map` class (or whatever name the maintainers might prefer) that can accept:\r\n\r\n```\r\nindex_type = generate_from_arrow_type(pa_type.index_type)\r\nvalue_type = generate_from_arrow_type(pa_type.value_type)\r\n```\r\n\r\nand then additional code points to modify:\r\n\r\n- FeatureType: https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L694\r\n- A branch to handle Map in get_nested_type: https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L719\r\n- I don't quite understand what `encode_nested_example` does but perhaps a branch there? https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L755\r\n- Similarly, I don't quite understand why `Sequence` is used this way in `generate_from_dict`, but perhaps a branch here? https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L775\r\n\r\nI couldn't find other usages of `Sequence` outside of defining specific datasets, so I'm not sure if that's a comprehensive set of touchpoints.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1905\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905","id":811384174,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1OTIxMDk1","number":1905,"title":"Standardizing datasets.dtypes","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-18T19:15:31Z","updated_at":"2021-02-20T22:01:30Z","closed_at":"2021-02-20T22:01:30Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1905","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1905.patch"},"body":"This PR was further branched off of jdy-str-to-pyarrow-parsing, so it depends on https:\/\/github.com\/huggingface\/datasets\/pull\/1900 going first for the diff to be up-to-date (I'm not sure if there's a way for me to use 
jdy-str-to-pyarrow-parsing as a base branch while having it appear in the pull requests here).\r\n\r\nThis moves away from `str(pyarrow.DataType)` as the method of choice for creating dtypes, favoring an explicit mapping to a list of supported Value dtypes.\r\n\r\nI believe in practice this should be backward compatible, since anyone previously using Value() would only have been able to use dtypes that had an identically named pyarrow factory function, which are all explicitly supported here.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1904\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904","id":811260904,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1ODE4MjA0","number":1904,"title":"Fix to_pandas for boolean ArrayXD","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-18T16:30:46Z","updated_at":"2021-02-18T17:10:03Z","closed_at":"2021-02-18T17:10:01Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1904","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1904.patch"},"body":"As noticed in #1887 the conversion of a dataset with a boolean ArrayXD feature types fails because of the underlying ListArray conversion to numpy requires `zero_copy_only=False`.\r\n\r\nzero copy is available for all primitive types except booleans\r\nsee https:\/\/arrow.apache.org\/docs\/python\/generated\/pyarrow.Array.html#pyarrow.Array.to_numpy\r\nand https:\/\/issues.apache.org\/jira\/browse\/ARROW-2871?jql=text%20~%20%22boolean%20to_numpy%22\r\n\r\ncc @SBrandeis ","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1903\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903","id":811145531,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1NzIwOTk2","number":1903,"title":"Initial commit for the addition of TIMIT dataset","user":{"login":"vrindaprabhu","id":16264631,"node_id":"MDQ6VXNlcjE2MjY0NjMx","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/16264631?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/vrindaprabhu","html_url":"https:\/\/github.com\/vrindaprabhu","followers_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/followers","following_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/orgs","repos_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/repos","events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/vrindaprabhu\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-18T14:23:12Z","updated_at":"2021-03-01T09:39:12Z","closed_at":"2021-03-01T09:39:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1903","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1903.patch"},"body":"Below points needs to be addressed:\r\n\r\n- Creation of dummy dataset is failing\r\n- Need to check on the data representation\r\n- License is not creative commons. Copyright: Portions \u00a9 1993 Trustees of the University of Pennsylvania\r\n\r\nAlso the links (_except the download_) point to the ami corpus! 
;-)\r\n\r\n@patrickvonplaten Requesting your comments, will be happy to address them!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1902\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902","id":810931171,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1NTQwMDM1","number":1902,"title":"Fix setimes_2 wmt urls","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-18T09:42:26Z","updated_at":"2021-02-18T09:55:41Z","closed_at":"2021-02-18T09:55:41Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1902","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1902.patch"},"body":"Continuation of #1901 \r\nSome other urls were missing https","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1901\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901","id":810845605,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1NDY5MDUy","number":1901,"title":"Fix OPUS dataset download 
errors","user":{"login":"YangWang92","id":3883941,"node_id":"MDQ6VXNlcjM4ODM5NDE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/3883941?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/YangWang92","html_url":"https:\/\/github.com\/YangWang92","followers_url":"https:\/\/api.github.com\/users\/YangWang92\/followers","following_url":"https:\/\/api.github.com\/users\/YangWang92\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/YangWang92\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/YangWang92\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/YangWang92\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/YangWang92\/orgs","repos_url":"https:\/\/api.github.com\/users\/YangWang92\/repos","events_url":"https:\/\/api.github.com\/users\/YangWang92\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/YangWang92\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-18T07:39:41Z","updated_at":"2021-02-18T15:07:20Z","closed_at":"2021-02-18T09:39:21Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1901","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1901.patch"},"body":"Replace http to https.\r\n\r\nhttps:\/\/github.com\/huggingface\/datasets\/issues\/854\r\n\r\nhttps:\/\/discuss.huggingface.co\/t\/cannot-download-wmt16\/2081\r\n\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1900\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900","id":810512488,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1MTkxNTc3","number":1900,"title":"Issue #1895: Bugfix for string_to_arrow timestamp[ns] 
support","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-17T20:26:04Z","updated_at":"2021-02-19T18:27:11Z","closed_at":"2021-02-19T18:27:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1900","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1900.patch"},"body":"Should resolve https:\/\/github.com\/huggingface\/datasets\/issues\/1895\r\n\r\nThe main part of this PR adds additional parsing in `string_to_arrow` to convert the timestamp dtypes that result from `str(pa_type)` back into the pa.DataType TimestampType.\r\n\r\nWhile adding unit-testing, I noticed that support for the double\/float types also don't invert correctly, so I added them, which I believe would hypothetically make this section of `Value` redundant:\r\n\r\n```\r\n def __post_init__(self):\r\n if self.dtype == \"double\": # fix inferred type\r\n self.dtype = \"float64\"\r\n if self.dtype == \"float\": # fix inferred type\r\n self.dtype = \"float32\"\r\n```\r\n\r\nHowever, since I think Value.dtype is part of the public interface, removing that would result in a backward-incompatible change, so I didn't muck with that.\r\n\r\nThe rest of the PR consists of docstrings that I added while developing locally so I could keep track of which functions were supposed to be inverses of each other, and thought I'd include them initially in case you want to keep them around, but I'm happy to delete or remove any of them at your request!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1899\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899","id":810308332,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc1MDIxMjc4","number":1899,"title":"Fix: ALT - fix duplicated examples in 
alt-parallel","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-17T15:53:56Z","updated_at":"2021-02-17T17:20:49Z","closed_at":"2021-02-17T17:20:49Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1899","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1899.patch"},"body":"As noticed in #1898 by @10-zin the examples of the `alt-paralel` configurations have all the same values for the `translation` field.\r\nThis was due to a bad copy of a python dict.\r\n\r\nThis PR fixes that.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1898\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1898","id":810157251,"node_id":"MDU6SXNzdWU4MTAxNTcyNTE=","number":1898,"title":"ALT dataset has repeating instances in all 
splits","user":{"login":"10-zin","id":33179372,"node_id":"MDQ6VXNlcjMzMTc5Mzcy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33179372?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/10-zin","html_url":"https:\/\/github.com\/10-zin","followers_url":"https:\/\/api.github.com\/users\/10-zin\/followers","following_url":"https:\/\/api.github.com\/users\/10-zin\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/10-zin\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/10-zin\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/10-zin\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/10-zin\/orgs","repos_url":"https:\/\/api.github.com\/users\/10-zin\/repos","events_url":"https:\/\/api.github.com\/users\/10-zin\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/10-zin\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":4,"created_at":"2021-02-17T12:51:42Z","updated_at":"2021-02-19T06:18:46Z","closed_at":"2021-02-19T06:18:46Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"The [ALT](https:\/\/huggingface.co\/datasets\/alt) dataset has all the same instances within each split :\/\r\nSeemed like a great 
dataset for some experiments I wanted to carry out, especially since its medium-sized, and has all splits.\r\n\r\nWould be great if this could be fixed :)\r\n\r\nAdded a snapshot of the contents from `explore-datset` feature, for quick reference.\r\n\r\n![image](https:\/\/user-images.githubusercontent.com\/33179372\/108206321-442a2d00-714c-11eb-882f-b4b6e708ef9c.png)\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1897\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897","id":810113263,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0ODU3MTIy","number":1897,"title":"Fix PandasArrayExtensionArray conversion to native type","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-17T11:48:24Z","updated_at":"2021-02-17T13:15:16Z","closed_at":"2021-02-17T13:15:15Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1897","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1897.patch"},"body":"To make the conversion to csv work in #1887 , we need PandasArrayExtensionArray used for multidimensional numpy arrays to be converted to pandas native types.\r\nHowever previously pandas.core.internals.ExtensionBlock.to_native_types would fail with an PandasExtensionArray because\r\n1. the PandasExtensionArray.isna method was wrong\r\n2. 
the conversion of a PandasExtensionArray to a numpy array with dtype=object was returning a multidimensional array while pandas excepts a 1D array in this case (more info [here](https:\/\/pandas.pydata.org\/pandas-docs\/stable\/reference\/api\/pandas.api.extensions.ExtensionArray.html#pandas.api.extensions.ExtensionArray))\r\n\r\nI fixed these two issues and now the conversion to native types works, and so is the export to csv.\r\ncc @SBrandeis ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1895\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1895","id":809630271,"node_id":"MDU6SXNzdWU4MDk2MzAyNzE=","number":1895,"title":"Bug Report: timestamp[ns] not recognized","user":{"login":"justin-yan","id":7731709,"node_id":"MDQ6VXNlcjc3MzE3MDk=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/7731709?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/justin-yan","html_url":"https:\/\/github.com\/justin-yan","followers_url":"https:\/\/api.github.com\/users\/justin-yan\/followers","following_url":"https:\/\/api.github.com\/users\/justin-yan\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/justin-yan\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/justin-yan\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/justin-yan\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/justin-yan\/orgs","repos_url":"https:\/\/api.github.com\/users\/justin-yan\/repos","events_url":"https:\/\/api.github.com\/users\/justin-yan\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/justin-yan\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-02-16T20:38:04Z","updated_at":"2021-02-19T18:27:11Z","closed_at":"2021-02-19T18:27:11Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Repro:\r\n\r\n```\r\nfrom datasets import Dataset\r\nimport pandas as pd\r\nimport pyarrow\r\n\r\ndf = pd.DataFrame(pd.date_range(\"2018-01-01\", periods=3, freq=\"H\"))\r\npyarrow.Table.from_pandas(df)\r\nDataset.from_pandas(df)\r\n# Throws ValueError: Neither timestamp[ns] nor timestamp[ns]_ seems to be a pyarrow data type.\r\n```\r\n\r\nThe factory function seems to be just \"timestamp\": https:\/\/arrow.apache.org\/docs\/python\/generated\/pyarrow.timestamp.html#pyarrow.timestamp\r\n\r\nIt seems like https:\/\/github.com\/huggingface\/datasets\/blob\/master\/src\/datasets\/features.py#L36-L43 could have a little bit of additional structure for handling these cases? I'd be happy to take a shot at opening a PR if I could receive some guidance on whether parsing something like `timestamp[ns]` and resolving it to timestamp('ns') is the goal of this method.\r\n\r\nAlternatively, if I'm using this incorrectly (e.g. 
is the expectation that we always provide a schema when timestamps are involved?), that would be very helpful to know as well!\r\n\r\n```\r\n$ pip list # only the relevant libraries\/versions\r\ndatasets 1.2.1\r\npandas 1.0.3\r\npyarrow 3.0.0\r\n```","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1894\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1894","id":809609654,"node_id":"MDU6SXNzdWU4MDk2MDk2NTQ=","number":1894,"title":"benchmarking against MMapIndexedDataset","user":{"login":"sshleifer","id":6045025,"node_id":"MDQ6VXNlcjYwNDUwMjU=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6045025?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/sshleifer","html_url":"https:\/\/github.com\/sshleifer","followers_url":"https:\/\/api.github.com\/users\/sshleifer\/followers","following_url":"https:\/\/api.github.com\/users\/sshleifer\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/sshleifer\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/sshleifer\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/sshleifer\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/sshleifer\/orgs","repos_url":"https:\/\/api.github.com\/users\/sshleifer\/repos","events_url":"https:\/\/api.github.com\/users\/sshleifer\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/sshleifer\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-16T20:04:58Z","updated_at":"2021-02-17T18:52:28Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"I am trying to benchmark my datasets based implementation against fairseq's [`MMapIndexedDataset`](https:\/\/github.com\/pytorch\/fairseq\/blob\/master\/fairseq\/data\/indexed_dataset.py#L365) and finding that, according to psrecord, my `datasets` implem uses about 3% more CPU memory and runs 1% slower for `wikitext103` (~1GB of tokens).\r\n\r\nQuestions:\r\n1) Is this (basically identical) performance expected? \r\n2) Is there a scenario where this library will outperform `MMapIndexedDataset`? (maybe more examples\/larger examples?)\r\n3) Should I be using different benchmarking tools than `psrecord`\/how do you guys do benchmarks?\r\n\r\nThanks in advance! 
Sam","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1893\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1893","id":809556503,"node_id":"MDU6SXNzdWU4MDk1NTY1MDM=","number":1893,"title":"wmt19 is broken","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[{"id":2067388877,"node_id":"MDU6TGFiZWwyMDY3Mzg4ODc3","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/dataset%20bug","name":"dataset bug","color":"2edb81","default":false,"description":"A bug in a dataset script provided in the 
library"}],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":2,"created_at":"2021-02-16T18:39:58Z","updated_at":"2021-03-03T17:42:02Z","closed_at":"2021-03-03T17:42:02Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"1. Check which lang pairs we have: `--dataset_name wmt19`:\r\n\r\nPlease pick one among the available configs: ['cs-en', 'de-en', 'fi-en', 'gu-en', 'kk-en', 'lt-en', 'ru-en', 'zh-en', 'fr-de']\r\n\r\n \r\n2. 
OK, let's pick `ru-en`:\r\n\r\n`--dataset_name wmt19 --dataset_config \"ru-en\"`\r\n\r\nno cookies:\r\n\r\n```\r\nTraceback (most recent call last):\r\n File \".\/run_seq2seq.py\", line 661, in \r\n main()\r\n File \".\/run_seq2seq.py\", line 317, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 740, in load_dataset\r\n builder_instance.download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 572, in download_and_prepare\r\n self._download_and_prepare(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/builder.py\", line 628, in _download_and_prepare\r\n split_generators = self._split_generators(dl_manager, **split_generators_kwargs)\r\n File \"\/home\/stas\/.cache\/huggingface\/modules\/datasets_modules\/datasets\/wmt19\/436092de5f3faaf0fc28bc84875475b384e90a5470fa6afaee11039ceddc5052\/wmt_utils.py\", line 755, in _split_generators\r\n downloaded_files = dl_manager.download_and_extract(urls_to_download)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/download_manager.py\", line 276, in download_and_extract\r\n return self.extract(self.download(url_or_urls))\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/download_manager.py\", line 191, in download\r\n downloaded_path_or_paths = map_nested(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 233, in map_nested\r\n mapped = [\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 234, in \r\n _single_map_nested((function, obj, types, None, True)) for obj in tqdm(iterable, disable=disable_tqdm)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 190, in _single_map_nested\r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 190, in \r\n mapped = [_single_map_nested((function, v, types, None, True)) for v in pbar]\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/py_utils.py\", line 172, in _single_map_nested\r\n return function(data_struct)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/download_manager.py\", line 211, in _download\r\n return cached_path(url_or_filename, download_config=download_config)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 584, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/storage.googleapis.com\/tfdataset-data\/downloadataset\/uncorpus\/UNv1.0.en-ru.tar.gz\r\n```","performed_via_github_app":null} 
-{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1892\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1892","id":809554174,"node_id":"MDU6SXNzdWU4MDk1NTQxNzQ=","number":1892,"title":"request to mirror wmt datasets, as they are really slow to download","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events"
,"type":"User","site_admin":false}],"milestone":null,"comments":5,"created_at":"2021-02-16T18:36:11Z","updated_at":"2021-03-25T11:53:23Z","closed_at":"2021-03-25T11:53:23Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"Would it be possible to mirror the wmt data files under hf? Some of them take hours to download and not because of the local speed. They are all quite small datasets, just extremely slow to download.\r\n\r\nThank you!","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1891\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1891","id":809550001,"node_id":"MDU6SXNzdWU4MDk1NTAwMDE=","number":1891,"title":"suggestion to improve a missing dataset error","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-16T18:29:13Z","updated_at":"2021-02-16T18:30:14Z","closed_at":null,"author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":null,"body":"I was using `--dataset_name wmt19` all was good. 
Then thought perhaps wmt20 is out, so I tried to use `--dataset_name wmt20`, got 3 different errors (1 repeated twice), none telling me the real issue - that `wmt20` isn't in the `datasets`:\r\n\r\n```\r\nTrue, predict_with_generate=True)\r\nTraceback (most recent call last):\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 323, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 584, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/wmt20\/wmt20.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 335, in prepare_module\r\n local_path = cached_path(file_path, download_config=download_config)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 274, in cached_path\r\n output_path = get_from_cache(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/utils\/file_utils.py\", line 584, in get_from_cache\r\n raise FileNotFoundError(\"Couldn't find file at {}\".format(url))\r\nFileNotFoundError: Couldn't find file at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/wmt20\/wmt20.py\r\n\r\nDuring handling of the above exception, another exception occurred:\r\n\r\nTraceback (most recent call last):\r\n File \".\/run_seq2seq.py\", line 661, in \r\n main()\r\n File \".\/run_seq2seq.py\", line 317, in main\r\n datasets = load_dataset(data_args.dataset_name, data_args.dataset_config_name)\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 706, in load_dataset\r\n module_path, hash, resolved_file_path = prepare_module(\r\n File \"\/mnt\/nvme1\/code\/huggingface\/datasets-master\/src\/datasets\/load.py\", line 343, in prepare_module\r\n raise FileNotFoundError(\r\nFileNotFoundError: Couldn't find file locally at wmt20\/wmt20.py, or remotely at https:\/\/raw.githubusercontent.com\/huggingface\/datasets\/master\/datasets\/wmt20\/wmt20.py.\r\nThe file is also not present on the master branch on github.\r\n```\r\n\r\nSuggestion: if it is not in a local path, check that there is an actual `https:\/\/github.com\/huggingface\/datasets\/tree\/master\/datasets\/wmt20` first and assert \"dataset `wmt20` doesn't exist in datasets\", rather than trying to find a load script - since the whole repo is not there.\r\n\r\nThe error occured when running:\r\n```\r\ncd examples\/seq2seq\r\nexport BS=16; rm -r output_dir; PYTHONPATH=..\/..\/src USE_TF=0 CUDA_VISIBLE_DEVICES=0 python .\/run_seq2seq.py --model_name_or_path t5-small --output_dir output_dir --adam_eps 1e-06 --do_eval --evaluation_strategy=steps --label_smoothing 0.1 --learning_rate 3e-5 --logging_first_step --logging_steps 1000 --max_source_length 128 --max_target_length 128 --num_train_epochs 1 --overwrite_output_dir --per_device_eval_batch_size $BS --predict_with_generate --eval_steps 25000 --sortish_sampler --task translation_en_to_ro --val_max_target_length 128 
--warmup_steps 500 --max_val_samples 500 --dataset_name wmt20 --dataset_config \"ro-en\" --source_prefix \"translate English to Romanian: \"\r\n```\r\n\r\nThanks.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1890\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890","id":809395586,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MjY0OTMx","number":1890,"title":"Reformat dataset cards section titles","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-16T15:11:47Z","updated_at":"2021-02-16T15:12:34Z","closed_at":"2021-02-16T15:12:33Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1890","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1890.patch"},"body":"Titles are formatted like [Foo](#foo) instead of just Foo","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1889\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889","id":809276015,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTY1NDAz","number":1889,"title":"Implement to_dict and to_pandas for 
Dataset","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-16T12:38:19Z","updated_at":"2021-02-18T18:42:37Z","closed_at":"2021-02-18T18:42:34Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1889","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1889.patch"},"body":"With options to return a generator or the full dataset","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1888\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888","id":809241123,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTM2MDU4","number":1888,"title":"Docs for adding new column on formatted 
dataset","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-16T11:45:00Z","updated_at":"2021-03-30T14:01:03Z","closed_at":"2021-02-16T11:58:57Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1888","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1888.patch"},"body":"As mentioned in #1872 we should add in the documentation how the format gets updated when new columns are added\r\n\r\nClose #1872","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1887\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887","id":809229809,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTI2NTMy","number":1887,"title":"Implement to_csv for 
Dataset","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":5,"created_at":"2021-02-16T11:27:29Z","updated_at":"2021-02-19T09:41:59Z","closed_at":"2021-02-19T09:41:59Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1887","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1887.patch"},"body":"cc @thomwolf \r\n\r\n`to_csv` supports passing either a file path or a *binary* file object\r\nThe writing is batched to avoid loading the whole table in memory","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1886\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886","id":809221885,"node_id":"MDExOlB1bGxSZXF1ZXN0NTc0MTE5ODcz","number":1886,"title":"Common 
voice","user":{"login":"BirgerMoell","id":1704131,"node_id":"MDQ6VXNlcjE3MDQxMzE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/1704131?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/BirgerMoell","html_url":"https:\/\/github.com\/BirgerMoell","followers_url":"https:\/\/api.github.com\/users\/BirgerMoell\/followers","following_url":"https:\/\/api.github.com\/users\/BirgerMoell\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/BirgerMoell\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/BirgerMoell\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/BirgerMoell\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/BirgerMoell\/orgs","repos_url":"https:\/\/api.github.com\/users\/BirgerMoell\/repos","events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/BirgerMoell\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-16T11:16:10Z","updated_at":"2021-03-09T18:51:31Z","closed_at":"2021-03-09T18:51:31Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1886","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1886.patch"},"body":"Started filling out information about the dataset and a dataset card.\r\n\r\nTo do\r\nCreate tagging file\r\nUpdate the common_voice.py file with more information","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1885\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885","id":808881501,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczODQyNzcz","number":1885,"title":"add missing info on how to add large 
files","user":{"login":"stas00","id":10676103,"node_id":"MDQ6VXNlcjEwNjc2MTAz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/10676103?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/stas00","html_url":"https:\/\/github.com\/stas00","followers_url":"https:\/\/api.github.com\/users\/stas00\/followers","following_url":"https:\/\/api.github.com\/users\/stas00\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/stas00\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/stas00\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/stas00\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/stas00\/orgs","repos_url":"https:\/\/api.github.com\/users\/stas00\/repos","events_url":"https:\/\/api.github.com\/users\/stas00\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/stas00\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T23:46:39Z","updated_at":"2021-02-16T16:22:19Z","closed_at":"2021-02-16T11:44:12Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1885","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1885.patch"},"body":"Thanks to @lhoestq's instructions I was able to add data files to a custom dataset repo. This PR is attempting to tell others how to do the same if they need to.\r\n\r\n@lhoestq ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1884\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884","id":808755894,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNzQwNzI5","number":1884,"title":"dtype fix when using numpy 
arrays","user":{"login":"bhavitvyamalik","id":19718818,"node_id":"MDQ6VXNlcjE5NzE4ODE4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/19718818?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/bhavitvyamalik","html_url":"https:\/\/github.com\/bhavitvyamalik","followers_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/followers","following_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/orgs","repos_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/repos","events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/bhavitvyamalik\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T18:55:25Z","updated_at":"2021-07-30T11:01:18Z","closed_at":"2021-07-30T11:01:18Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1884","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1884.patch"},"body":"As discussed in #625 this fix lets the user preserve the dtype of numpy array to pyarrow array which was getting lost due to conversion of numpy array -> list -> pyarrow array","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1883\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883","id":808750623,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNzM2NTIz","number":1883,"title":"Add not-in-place implementations for several dataset 
transforms","user":{"login":"SBrandeis","id":33657802,"node_id":"MDQ6VXNlcjMzNjU3ODAy","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/33657802?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/SBrandeis","html_url":"https:\/\/github.com\/SBrandeis","followers_url":"https:\/\/api.github.com\/users\/SBrandeis\/followers","following_url":"https:\/\/api.github.com\/users\/SBrandeis\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/SBrandeis\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/SBrandeis\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/SBrandeis\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/SBrandeis\/orgs","repos_url":"https:\/\/api.github.com\/users\/SBrandeis\/repos","events_url":"https:\/\/api.github.com\/users\/SBrandeis\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/SBrandeis\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-15T18:44:26Z","updated_at":"2021-02-24T14:54:49Z","closed_at":"2021-02-24T14:53:26Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1883","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1883.patch"},"body":"Should we deprecate in-place versions of such methods?","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1882\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882","id":808716576,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNzA4OTEw","number":1882,"title":"Create Remote 
Manager","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"open","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":2,"created_at":"2021-02-15T17:36:24Z","updated_at":"2021-03-08T16:15:10Z","closed_at":null,"author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1882","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1882.patch"},"body":"Refactoring to separate the concern of remote (HTTP\/FTP requests) management.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1881\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881","id":808578200,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTk1Nzkw","number":1881,"title":"`list_datasets()` returns a list of strings, not 
objects","user":{"login":"pminervini","id":227357,"node_id":"MDQ6VXNlcjIyNzM1Nw==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/227357?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/pminervini","html_url":"https:\/\/github.com\/pminervini","followers_url":"https:\/\/api.github.com\/users\/pminervini\/followers","following_url":"https:\/\/api.github.com\/users\/pminervini\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/pminervini\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/pminervini\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/pminervini\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/pminervini\/orgs","repos_url":"https:\/\/api.github.com\/users\/pminervini\/repos","events_url":"https:\/\/api.github.com\/users\/pminervini\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/pminervini\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T14:20:15Z","updated_at":"2021-02-15T15:09:49Z","closed_at":"2021-02-15T15:09:48Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1881","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1881.patch"},"body":"Here and there in the docs there is still stuff like this:\r\n\r\n```python\r\n>>> datasets_list = list_datasets()\r\n>>> print(', '.join(dataset.id for dataset in datasets_list))\r\n```\r\n\r\nHowever, my understanding is that `list_datasets()` returns a list of strings rather than a list of objects.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1880\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880","id":808563439,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTgzNjg0","number":1880,"title":"Update multi_woz_v22 
checksums","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-15T14:00:18Z","updated_at":"2021-02-15T14:18:19Z","closed_at":"2021-02-15T14:18:18Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1880","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1880.patch"},"body":"As noticed in #1876 the checksums of this dataset are outdated.\r\nI updated them in this PR","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1879\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879","id":808541442,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTY1NDAx","number":1879,"title":"Replace 
flatten_nested","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-15T13:29:40Z","updated_at":"2021-02-19T18:35:14Z","closed_at":"2021-02-19T18:35:14Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1879","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1879.patch"},"body":"Replace `flatten_nested` with `NestedDataStructure.flatten`.\r\n\r\nThis is a first step towards having all NestedDataStructure logic as a separated concern, independent of the caller\/user of the data structure.\r\n\r\nEventually, all checks (whether the underlying data is list, dict, etc.) 
will be only inside this class.\r\n\r\nI have also generalized the flattening, and now it handles multiple levels of nesting.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1878\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878","id":808526883,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczNTUyODk3","number":1878,"title":"Add LJ Speech dataset","user":{"login":"anton-l","id":26864830,"node_id":"MDQ6VXNlcjI2ODY0ODMw","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/26864830?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/anton-l","html_url":"https:\/\/github.com\/anton-l","followers_url":"https:\/\/api.github.com\/users\/anton-l\/followers","following_url":"https:\/\/api.github.com\/users\/anton-l\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/anton-l\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/anton-l\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/anton-l\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/anton-l\/orgs","repos_url":"https:\/\/api.github.com\/users\/anton-l\/repos","events_url":"https:\/\/api.github.com\/users\/anton-l\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/anton-l\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":3,"created_at":"2021-02-15T13:10:42Z","updated_at":"2021-02-15T19:39:41Z","closed_at":"2021-02-15T14:18:09Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1878","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1878.patch"},"body":"This PR adds the LJ Speech dataset (https:\/\/keithito.com\/LJ-Speech-Dataset\/)\r\nAs requested by #1841 \r\nThe ASR format is based on #1767 \r\n\r\nThere are a couple of quirks that should be addressed:\r\n- I tagged this dataset as `other-other-automatic-speech-recognition` and `other-other-text-to-speech` (as classified by paperswithcode). Since the number of speech datasets is about to grow, maybe these categories should be added to the main list? \r\n- Similarly to #1767 this dataset uses only a single dummy sample to reduce the zip size (`wav`s are quite heavy). Is there a plan to allow LFS or S3 usage for dummy data in the repo?\r\n- The dataset is distributed under the Public Domain license, which is not used anywhere else in the repo, AFAIK. 
Do you think Public Domain is worth adding to the tagger app as well?\r\n\r\nPinging @patrickvonplaten to review","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1877\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1877","id":808462272,"node_id":"MDU6SXNzdWU4MDg0NjIyNzI=","number":1877,"title":"Allow concatenation of both in-memory and on-disk datasets","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"assignees":[{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","event
s_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false}],"milestone":null,"comments":6,"created_at":"2021-02-15T11:39:46Z","updated_at":"2021-03-26T16:51:58Z","closed_at":"2021-03-26T16:51:58Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":null,"body":"This is a prerequisite for the addition of the `add_item` feature (see #1870).\r\nCurrently there is one assumption that we would need to change: a dataset is either fully in memory (dataset._data_files is empty), or the dataset can be reloaded from disk (using the dataset._data_files).\r\nThis assumption is used for pickling for example:\r\n- in-memory dataset can just be pickled\/unpickled in-memory\r\n- on-disk dataset can be unloaded to only keep the filepaths when pickling, and then reloaded from the disk when unpickling\r\n\r\nMaybe let's have a design that allows a Dataset to have a Table that can be rebuilt from heterogenous sources like in-memory tables or on-disk tables ? This could also be further extended in the future\r\n\r\nOne idea would be to define a list of sources and each source implements a way to reload its corresponding pyarrow Table.\r\nThen the dataset would be the concatenation of all these tables.\r\n\r\nDepending on the source type, the serialization using pickle would be different. In-memory data would be copied while on-disk data would simply be replaced by the path to these data.\r\n\r\nIf you have some ideas you would like to share about the design\/API feel free to do so :)\r\n\r\ncc @albertvillanova ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1876\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1876","id":808025859,"node_id":"MDU6SXNzdWU4MDgwMjU4NTk=","number":1876,"title":" load_dataset(\"multi_woz_v22\") 
NonMatchingChecksumError","user":{"login":"Vincent950129","id":5945326,"node_id":"MDQ6VXNlcjU5NDUzMjY=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/5945326?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/Vincent950129","html_url":"https:\/\/github.com\/Vincent950129","followers_url":"https:\/\/api.github.com\/users\/Vincent950129\/followers","following_url":"https:\/\/api.github.com\/users\/Vincent950129\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/Vincent950129\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/Vincent950129\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/Vincent950129\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/Vincent950129\/orgs","repos_url":"https:\/\/api.github.com\/users\/Vincent950129\/repos","events_url":"https:\/\/api.github.com\/users\/Vincent950129\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/Vincent950129\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-14T19:14:48Z","updated_at":"2021-08-04T18:08:00Z","closed_at":"2021-08-04T18:08:00Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, it seems that loading the multi_woz_v22 dataset gives a NonMatchingChecksumError.\r\n\r\nTo reproduce:\r\n\r\n`dataset = load_dataset('multi_woz_v22','v2.2_active_only',split='train')`\r\n\r\n\r\nThis will give the following error:\r\n\r\n```\r\n raise NonMatchingChecksumError(error_msg + str(bad_urls))\r\ndatasets.utils.info_utils.NonMatchingChecksumError: Checksums didn't match for dataset source files:\r\n['https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/dialog_acts.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_001.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_003.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_004.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_005.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_006.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_007.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_008.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_009.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_010.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_012.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_013.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_014.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_015.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_016.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/train\/dialogues_017.json', 
'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/dev\/dialogues_001.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/dev\/dialogues_002.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/test\/dialogues_001.json', 'https:\/\/github.com\/budzianowski\/multiwoz\/raw\/master\/data\/MultiWOZ_2.2\/test\/dialogues_002.json']\r\n```\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1875\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875","id":807887267,"node_id":"MDExOlB1bGxSZXF1ZXN0NTczMDM2NzE0","number":1875,"title":"Adding sari metric","user":{"login":"ddhruvkr","id":6061911,"node_id":"MDQ6VXNlcjYwNjE5MTE=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/6061911?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/ddhruvkr","html_url":"https:\/\/github.com\/ddhruvkr","followers_url":"https:\/\/api.github.com\/users\/ddhruvkr\/followers","following_url":"https:\/\/api.github.com\/users\/ddhruvkr\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/ddhruvkr\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/ddhruvkr\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/ddhruvkr\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/ddhruvkr\/orgs","repos_url":"https:\/\/api.github.com\/users\/ddhruvkr\/repos","events_url":"https:\/\/api.github.com\/users\/ddhruvkr\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/ddhruvkr\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-14T04:38:35Z","updated_at":"2021-02-17T15:56:27Z","closed_at":"2021-02-17T15:56:27Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1875","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1875.patch"},"body":"Adding SARI metric that is used in evaluation of text simplification. 
This is required as part of the GEM benchmark.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1874\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874","id":807786094,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyOTYzMjAy","number":1874,"title":"Adding Europarl Bilingual dataset","user":{"login":"lucadiliello","id":23355969,"node_id":"MDQ6VXNlcjIzMzU1OTY5","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/23355969?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lucadiliello","html_url":"https:\/\/github.com\/lucadiliello","followers_url":"https:\/\/api.github.com\/users\/lucadiliello\/followers","following_url":"https:\/\/api.github.com\/users\/lucadiliello\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lucadiliello\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lucadiliello\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lucadiliello\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lucadiliello\/orgs","repos_url":"https:\/\/api.github.com\/users\/lucadiliello\/repos","events_url":"https:\/\/api.github.com\/users\/lucadiliello\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lucadiliello\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":7,"created_at":"2021-02-13T17:02:04Z","updated_at":"2021-03-04T10:38:22Z","closed_at":"2021-03-04T10:38:22Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1874","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1874.patch"},"body":"Implementation of Europarl bilingual dataset from described [here](https:\/\/opus.nlpl.eu\/Europarl.php).\r\n\r\nThis dataset allows to use every language pair detailed in the original dataset. 
The loading script manages also the small errors contained in the original dataset (in very rare cases (1 over 10M) there are some keys that references to inexistent sentences).\r\nI chose to follow the the style of a similar dataset available in this repository: `multi_para_crawl`.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1873\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873","id":807750745,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyOTM4MTYy","number":1873,"title":"add iapp_wiki_qa_squad","user":{"login":"cstorm125","id":15519308,"node_id":"MDQ6VXNlcjE1NTE5MzA4","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/15519308?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/cstorm125","html_url":"https:\/\/github.com\/cstorm125","followers_url":"https:\/\/api.github.com\/users\/cstorm125\/followers","following_url":"https:\/\/api.github.com\/users\/cstorm125\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/cstorm125\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/cstorm125\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/cstorm125\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/cstorm125\/orgs","repos_url":"https:\/\/api.github.com\/users\/cstorm125\/repos","events_url":"https:\/\/api.github.com\/users\/cstorm125\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/cstorm125\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-13T13:34:27Z","updated_at":"2021-02-16T14:21:58Z","closed_at":"2021-02-16T14:21:58Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1873","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1873.patch"},"body":"`iapp_wiki_qa_squad` is an extractive question answering dataset from Thai Wikipedia articles.\r\nIt is adapted from [the original iapp-wiki-qa-dataset](https:\/\/github.com\/iapp-technology\/iapp-wiki-qa-dataset)\r\nto [SQuAD](https:\/\/rajpurkar.github.io\/SQuAD-explorer\/) format, resulting in\r\n5761\/742\/739 questions from 1529\/191\/192 articles.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1872\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1872","id":807711935,"node_id":"MDU6SXNzdWU4MDc3MTE5MzU=","number":1872,"title":"Adding a new column to the dataset after set_format was 
called","user":{"login":"villmow","id":2743060,"node_id":"MDQ6VXNlcjI3NDMwNjA=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/2743060?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/villmow","html_url":"https:\/\/github.com\/villmow","followers_url":"https:\/\/api.github.com\/users\/villmow\/followers","following_url":"https:\/\/api.github.com\/users\/villmow\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/villmow\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/villmow\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/villmow\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/villmow\/orgs","repos_url":"https:\/\/api.github.com\/users\/villmow\/repos","events_url":"https:\/\/api.github.com\/users\/villmow\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/villmow\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":4,"created_at":"2021-02-13T09:14:35Z","updated_at":"2021-03-30T14:01:45Z","closed_at":"2021-03-30T14:01:45Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, \r\n\r\nthanks for the nice library. I'm in the process of creating a custom dataset, which has a mix of tensors and lists of strings. I stumbled upon an error and want to know if its a problem on my side. \r\n\r\nI load some lists of strings and integers, then call `data.set_format(\"torch\", columns=[\"some_integer_column1\", \"some_integer_column2\"], output_all_columns=True)`. This converts the integer columns into tensors, but keeps the lists of strings as they are. I then call `map` to add a new column to my dataset, which is a **list of strings**. Once I iterate through my dataset, I get an error that the new column can't be converted into a tensor (which is probably caused by `set_format`). 
\r\n\r\nBelow some pseudo code:\r\n```python\r\n def augment_func(sample: Dict) -> Dict:\r\n # do something\r\n return {\r\n \"some_integer_column1\" : augmented_data[\"some_integer_column1\"], # <-- tensor\r\n \"some_integer_column2\" : augmented_data[\"some_integer_column2\"], # <-- tensor\r\n \"NEW_COLUMN\": targets, # <-- list of strings\r\n }\r\n\r\n\r\n data = datasets.load_dataset(__file__, data_dir=\"...\", split=\"train\")\r\n data.set_format(\"torch\", columns=[\"some_integer_column1\", \"some_integer_column2\"], output_all_columns=True)\r\n\r\n augmented_dataset = data.map(augment_func, batched=False)\r\n \r\n for sample in augmented_dataset:\r\n print(sample) # fails\r\n\r\n```\r\n\r\nand the exception:\r\n```python\r\nTraceback (most recent call last):\r\n File \"dataset.py\", line 487, in \r\n main()\r\n File \"dataset.py\", line 471, in main\r\n for sample in augmented_dataset:\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 697, in __iter__\r\n yield self._getitem(\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 1069, in _getitem\r\n outputs = self._convert_outputs(\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 890, in _convert_outputs\r\n v = map_nested(command, v, **map_nested_kwargs)\r\n File \"lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in command\r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in \r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in command\r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 850, in \r\n return [map_nested(command, i, **map_nested_kwargs) for i in x]\r\n File \"lib\/python3.8\/site-packages\/datasets\/utils\/py_utils.py\", line 225, in map_nested\r\n return function(data_struct)\r\n File \"lib\/python3.8\/site-packages\/datasets\/arrow_dataset.py\", line 851, in command\r\n return torch.tensor(x, **format_kwargs)\r\nTypeError: new(): invalid data type 'str'\r\n```\r\n\r\nThanks!\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1871\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871","id":807697671,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyODk5Nzgz","number":1871,"title":"Add newspop 
dataset","user":{"login":"frankier","id":299380,"node_id":"MDQ6VXNlcjI5OTM4MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/299380?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/frankier","html_url":"https:\/\/github.com\/frankier","followers_url":"https:\/\/api.github.com\/users\/frankier\/followers","following_url":"https:\/\/api.github.com\/users\/frankier\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/frankier\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/frankier\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/frankier\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/frankier\/orgs","repos_url":"https:\/\/api.github.com\/users\/frankier\/repos","events_url":"https:\/\/api.github.com\/users\/frankier\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/frankier\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-13T07:31:23Z","updated_at":"2021-03-08T10:12:45Z","closed_at":"2021-03-08T10:12:45Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1871","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1871.patch"},"body":"","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1870\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870","id":807306564,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyNTc4Mjc4","number":1870,"title":"Implement Dataset add_item","user":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"labels":[{"id":1935892871,"node_id":"MDU6TGFiZWwxOTM1ODkyODcx","url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/labels\/enhancement","name":"enhancement","color":"a2eeef","default":true,"description":"New feature or 
request"}],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3","html_url":"https:\/\/github.com\/huggingface\/datasets\/milestone\/3","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/milestones\/3\/labels","id":6644287,"node_id":"MDk6TWlsZXN0b25lNjY0NDI4Nw==","number":3,"title":"1.7","description":"Next minor release","creator":{"login":"albertvillanova","id":8515462,"node_id":"MDQ6VXNlcjg1MTU0NjI=","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/8515462?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/albertvillanova","html_url":"https:\/\/github.com\/albertvillanova","followers_url":"https:\/\/api.github.com\/users\/albertvillanova\/followers","following_url":"https:\/\/api.github.com\/users\/albertvillanova\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/albertvillanova\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/albertvillanova\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/albertvillanova\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/albertvillanova\/orgs","repos_url":"https:\/\/api.github.com\/users\/albertvillanova\/repos","events_url":"https:\/\/api.github.com\/users\/albertvillanova\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/albertvillanova\/received_events","type":"User","site_admin":false},"open_issues":0,"closed_issues":3,"state":"closed","created_at":"2021-04-09T13:16:31Z","updated_at":"2021-05-31T16:20:53Z","due_on":"2021-05-14T07:00:00Z","closed_at":"2021-05-31T16:20:53Z"},"comments":5,"created_at":"2021-02-12T15:03:46Z","updated_at":"2021-04-23T10:01:31Z","closed_at":"2021-04-23T10:01:31Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1870","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1870.patch"},"body":"Implement `Dataset.add_item`.\r\n\r\nClose #1854.","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1869\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869","id":807159835,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyNDU0NTMy","number":1869,"title":"Remove outdated commands in favor of 
huggingface-cli","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-12T11:28:10Z","updated_at":"2021-02-12T16:13:09Z","closed_at":"2021-02-12T16:13:08Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1869","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1869.patch"},"body":"Removing the old user commands since `huggingface_hub` is going to be used instead.\r\ncc @julien-c ","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1868\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868","id":807138159,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyNDM2MjA0","number":1868,"title":"Update oscar 
sizes","user":{"login":"lhoestq","id":42851186,"node_id":"MDQ6VXNlcjQyODUxMTg2","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/42851186?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/lhoestq","html_url":"https:\/\/github.com\/lhoestq","followers_url":"https:\/\/api.github.com\/users\/lhoestq\/followers","following_url":"https:\/\/api.github.com\/users\/lhoestq\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/lhoestq\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/lhoestq\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/lhoestq\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/lhoestq\/orgs","repos_url":"https:\/\/api.github.com\/users\/lhoestq\/repos","events_url":"https:\/\/api.github.com\/users\/lhoestq\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/lhoestq\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":0,"created_at":"2021-02-12T10:55:35Z","updated_at":"2021-02-12T11:03:07Z","closed_at":"2021-02-12T11:03:06Z","author_association":"MEMBER","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1868","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1868.patch"},"body":"This commit https:\/\/github.com\/huggingface\/datasets\/commit\/837a152e4724adc5308e2c4481908c00a8d93383 removed empty lines from the oscar deduplicated datasets. This PR updates the size of each deduplicated dataset to fix possible `NonMatchingSplitsSizesError` errors. 
cc @cahya-wirawan","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1867\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/issues\/1867","id":807127181,"node_id":"MDU6SXNzdWU4MDcxMjcxODE=","number":1867,"title":"ERROR WHEN USING SET_TRANSFORM() ","user":{"login":"alexvaca0","id":35173563,"node_id":"MDQ6VXNlcjM1MTczNTYz","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/35173563?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/alexvaca0","html_url":"https:\/\/github.com\/alexvaca0","followers_url":"https:\/\/api.github.com\/users\/alexvaca0\/followers","following_url":"https:\/\/api.github.com\/users\/alexvaca0\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/alexvaca0\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/alexvaca0\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/alexvaca0\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/alexvaca0\/orgs","repos_url":"https:\/\/api.github.com\/users\/alexvaca0\/repos","events_url":"https:\/\/api.github.com\/users\/alexvaca0\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/alexvaca0\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":8,"created_at":"2021-02-12T10:38:31Z","updated_at":"2021-03-01T14:04:24Z","closed_at":"2021-02-24T12:00:43Z","author_association":"NONE","active_lock_reason":null,"pull_request":null,"body":"Hi, I'm trying to use dataset.set_transform(encode) as @lhoestq told me in this issue: https:\/\/github.com\/huggingface\/datasets\/issues\/1825#issuecomment-774202797\r\n\r\nHowever, when I try to use Trainer from transformers with such dataset, it throws an error:\r\n\r\n```\r\nTypeError: __init__() missing 1 required positional argument: 'transform'\r\n[INFO|trainer.py:357] 2021-02-12 10:18:09,893 >> The following columns in the training set don't have a corresponding argument in `AlbertForMaskedLM.forward` and have been ignored: text.\r\nException in device=TPU:0: __init__() missing 1 required positional argument: 'transform'\r\nTraceback (most recent call last):\r\n File \"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 330, in _mp_start_fn\r\n _start_fn(index, pf_cfg, fn, args)\r\n File \"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/torch_xla\/distributed\/xla_multiprocessing.py\", line 324, in _start_fn\r\n fn(gindex, *args)\r\n File \"\/home\/alejandro_vaca\/transformers\/examples\/language-modeling\/run_mlm_wwm.py\", line 368, in _mp_fn\r\n main()\r\n File \"\/home\/alejandro_vaca\/transformers\/examples\/language-modeling\/run_mlm_wwm.py\", line 332, in main\r\n data_collator=data_collator,\r\n File \"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/transformers\/trainer.py\", line 286, in __init__\r\n self._remove_unused_columns(self.train_dataset, description=\"training\")\r\n File 
\"\/anaconda3\/envs\/torch-xla-1.7\/lib\/python3.6\/site-packages\/transformers\/trainer.py\", line 359, in _remove_unused_columns\r\n dataset.set_format(type=dataset.format[\"type\"], columns=columns)\r\n File \"\/home\/alejandro_vaca\/datasets\/src\/datasets\/fingerprint.py\", line 312, in wrapper\r\n out = func(self, *args, **kwargs)\r\n File \"\/home\/alejandro_vaca\/datasets\/src\/datasets\/arrow_dataset.py\", line 818, in set_format\r\n _ = get_formatter(type, **format_kwargs)\r\n File \"\/home\/alejandro_vaca\/datasets\/src\/datasets\/formatting\/__init__.py\", line 112, in get_formatter\r\n return _FORMAT_TYPES[format_type](**format_kwargs)\r\nTypeError: __init__() missing 1 required positional argument: 'transform'\r\n```\r\n\r\nThe code I'm using:\r\n\r\n```{python}\r\n\r\n def tokenize_function(examples):\r\n # Remove empty lines\r\n examples[\"text\"] = [line for line in examples[\"text\"] if len(line) > 0 and not line.isspace()]\r\n return tokenizer(examples[\"text\"], padding=padding, truncation=True, max_length=data_args.max_seq_length)\r\n\r\n datasets.set_transform(tokenize_function)\r\n\r\n data_collator = DataCollatorForWholeWordMask(tokenizer=tokenizer, mlm_probability=data_args.mlm_probability)\r\n\r\n # Initialize our Trainer\r\n trainer = Trainer(\r\n model=model,\r\n args=training_args,\r\n train_dataset=datasets[\"train\"] if training_args.do_train else None,\r\n eval_dataset=datasets[\"val\"] if training_args.do_eval else None,\r\n tokenizer=tokenizer,\r\n data_collator=data_collator,\r\n )\r\n```\r\n\r\nI've installed from source, master branch.\r\n","performed_via_github_app":null} -{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866","repository_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets","labels_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/labels{\/name}","comments_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/comments","events_url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/issues\/1866\/events","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866","id":807017816,"node_id":"MDExOlB1bGxSZXF1ZXN0NTcyMzM3NDQ1","number":1866,"title":"Add dataset for Financial 
PhraseBank","user":{"login":"frankier","id":299380,"node_id":"MDQ6VXNlcjI5OTM4MA==","avatar_url":"https:\/\/avatars.githubusercontent.com\/u\/299380?v=4","gravatar_id":"","url":"https:\/\/api.github.com\/users\/frankier","html_url":"https:\/\/github.com\/frankier","followers_url":"https:\/\/api.github.com\/users\/frankier\/followers","following_url":"https:\/\/api.github.com\/users\/frankier\/following{\/other_user}","gists_url":"https:\/\/api.github.com\/users\/frankier\/gists{\/gist_id}","starred_url":"https:\/\/api.github.com\/users\/frankier\/starred{\/owner}{\/repo}","subscriptions_url":"https:\/\/api.github.com\/users\/frankier\/subscriptions","organizations_url":"https:\/\/api.github.com\/users\/frankier\/orgs","repos_url":"https:\/\/api.github.com\/users\/frankier\/repos","events_url":"https:\/\/api.github.com\/users\/frankier\/events{\/privacy}","received_events_url":"https:\/\/api.github.com\/users\/frankier\/received_events","type":"User","site_admin":false},"labels":[],"state":"closed","locked":false,"assignee":null,"assignees":[],"milestone":null,"comments":1,"created_at":"2021-02-12T07:30:56Z","updated_at":"2021-02-17T14:22:36Z","closed_at":"2021-02-17T14:22:36Z","author_association":"CONTRIBUTOR","active_lock_reason":null,"pull_request":{"url":"https:\/\/api.github.com\/repos\/huggingface\/datasets\/pulls\/1866","html_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866","diff_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866.diff","patch_url":"https:\/\/github.com\/huggingface\/datasets\/pull\/1866.patch"},"body":"","performed_via_github_app":null} +version https://git-lfs.github.com/spec/v1 +oid sha256:3721daf05b003cd183d7c90c45043ecf606f08582bd8b7c0391b0111fc6455c1 +size 9073228