Datasets:

Languages:
English
Size Categories:
n>1T
ArXiv:
Tags:
DOI:
License:

Compatibility with released datatrove version

#6
by stefan-it - opened

Hi,

I just wanted to download one parquet file with datatrove using:

from datatrove.pipeline.readers import ParquetReader

data_reader = ParquetReader("hf://datasets/HuggingFaceFW/fineweb/data/CC-MAIN-2024-10", limit=1)

for document in data_reader():
    print(document)

This, however, ended with an error message:

---------------------------------------------------------------------------
HTTPError                                 Traceback (most recent call last)
File ~/.venvs/flair/lib/python3.11/site-packages/huggingface_hub/utils/_errors.py:304, in hf_raise_for_status(response, endpoint_name)
    303 try:
--> 304     response.raise_for_status()
    305 except HTTPError as e:

File ~/.venvs/flair/lib/python3.11/site-packages/requests/models.py:1021, in Response.raise_for_status(self)
   1020 if http_error_msg:
-> 1021     raise HTTPError(http_error_msg, response=self)

HTTPError: 503 Server Error: Service Temporarily Unavailable for url: https://huggingface.co/api/datasets/HuggingFaceFW/fineweb/tree/main/data%2FCC-MAIN-2024-10?recursive=True&expand=True&cursor=ZXlKbWFXeGxYMjVoYldVaU9pSmtZWFJoTDBORExVMUJTVTR0TWpBeU5DMHhNQzh3TURGZk1EQXdORGt1Y0dGeWNYVmxkQ0o5OjEwMA%3D%3D

The above exception was the direct cause of the following exception:

HfHubHTTPError                            Traceback (most recent call last)
Cell In[3], line 1
----> 1 for document in data_reader():
      2     print(document)

File ~/.venvs/flair/lib/python3.11/site-packages/datatrove/pipeline/readers/base.py:186, in BaseDiskReader.run(self, data, rank, world_size)
    184 if data:
    185     yield from data
--> 186 files_shard = self.data_folder.get_shard(
    187     rank, world_size, recursive=self.recursive, glob_pattern=self.glob_pattern
    188 )
    189 if len(files_shard) == 0:
    190     if rank == 0:

File ~/.venvs/flair/lib/python3.11/site-packages/datatrove/io.py:131, in DataFolder.get_shard(self, rank, world_size, **kwargs)
    118 def get_shard(self, rank: int, world_size: int, **kwargs) -> list[str]:
    119     """Fetch a shard (set of files) for a given rank, assuming there are a total of `world_size` shards.
    120     This should be deterministic to not have any overlap among different ranks.
    121     Will return files [rank, rank+world_size, rank+2*world_size, ...]
   (...)
    129 
    130     """
--> 131     return self.list_files(**kwargs)[rank::world_size]

File ~/.venvs/flair/lib/python3.11/site-packages/datatrove/io.py:106, in DataFolder.list_files(self, subdirectory, recursive, glob_pattern)
     99 if glob_pattern and not has_magic(glob_pattern):
    100     # makes it slightly easier for file extensions
    101     glob_pattern = f"*{glob_pattern}"
    102 return sorted(
    103     [
    104         f
    105         for f, info in (
--> 106             self.find(subdirectory, maxdepth=0 if not recursive else None, detail=True)
    107             if not glob_pattern
    108             else self.glob(
    109                 self.fs.sep.join([glob_pattern, subdirectory]),
    110                 maxdepth=0 if not recursive else None,
    111                 detail=True,
    112             )
    113         ).items()
    114         if info["type"] != "directory"
    115     ]
    116 )

File ~/.venvs/flair/lib/python3.11/site-packages/fsspec/implementations/dirfs.py:304, in DirFileSystem.find(self, path, *args, **kwargs)
    302 def find(self, path, *args, **kwargs):
    303     detail = kwargs.get("detail", False)
--> 304     ret = self.fs.find(self._join(path), *args, **kwargs)
    305     if detail:
    306         return {self._relpath(path): info for path, info in ret.items()}

File ~/.venvs/flair/lib/python3.11/site-packages/huggingface_hub/hf_file_system.py:417, in HfFileSystem.find(self, path, maxdepth, withdirs, detail, refresh, revision, **kwargs)
    415 kwargs = {"expand_info": detail, **kwargs}
    416 try:
--> 417     out = self._ls_tree(path, recursive=True, refresh=refresh, revision=resolved_path.revision, **kwargs)
    418 except EntryNotFoundError:
    419     # Path could be a file
    420     if self.info(path, revision=revision, **kwargs)["type"] == "file":

File ~/.venvs/flair/lib/python3.11/site-packages/huggingface_hub/hf_file_system.py:369, in HfFileSystem._ls_tree(self, path, recursive, refresh, revision, expand_info)
    360 else:
    361     tree = self._api.list_repo_tree(
    362         resolved_path.repo_id,
    363         resolved_path.path_in_repo,
   (...)
    367         repo_type=resolved_path.repo_type,
    368     )
--> 369     for path_info in tree:
    370         if isinstance(path_info, RepoFile):
    371             cache_path_info = {
    372                 "name": root_path + "/" + path_info.path,
    373                 "size": path_info.size,
   (...)
    378                 "security": path_info.security,
    379             }

File ~/.venvs/flair/lib/python3.11/site-packages/huggingface_hub/hf_api.py:2915, in HfApi.list_repo_tree(self, repo_id, path_in_repo, recursive, expand, revision, repo_type, token)
   2913 encoded_path_in_repo = "/" + quote(path_in_repo, safe="") if path_in_repo else ""
   2914 tree_url = f"{self.endpoint}/api/{repo_type}s/{repo_id}/tree/{revision}{encoded_path_in_repo}"
-> 2915 for path_info in paginate(path=tree_url, headers=headers, params={"recursive": recursive, "expand": expand}):
   2916     yield (RepoFile(**path_info) if path_info["type"] == "file" else RepoFolder(**path_info))

File ~/.venvs/flair/lib/python3.11/site-packages/huggingface_hub/utils/_pagination.py:45, in paginate(path, params, headers)
     43 logger.debug(f"Pagination detected. Requesting next page: {next_page}")
     44 r = session.get(next_page, headers=headers)
---> 45 hf_raise_for_status(r)
     46 yield from r.json()
     47 next_page = _get_next_page(r)

File ~/.venvs/flair/lib/python3.11/site-packages/huggingface_hub/utils/_errors.py:362, in hf_raise_for_status(response, endpoint_name)
    358     raise BadRequestError(message, response=response) from e
    360 # Convert `HTTPError` into a `HfHubHTTPError` to display request information
    361 # as well (request id and/or server error message)
--> 362 raise HfHubHTTPError(str(e), response=response) from e

HfHubHTTPError: 503 Server Error: Service Temporarily Unavailable for url: https://huggingface.co/api/datasets/HuggingFaceFW/fineweb/tree/main/data%2FCC-MAIN-2024-10?recursive=True&expand=True&cursor=ZXlKbWFXeGxYMjVoYldVaU9pSmtZWFJoTDBORExVMUJTVTR0TWpBeU5DMHhNQzh3TURGZk1EQXdORGt1Y0dGeWNYVmxkQ0o5OjEwMA%3D%3D

Before that, I had installed the latest released datatrove version:

(flair) stefan@:~$ pip3 install datatrove
Collecting datatrove
  Obtaining dependency information for datatrove from https://files.pythonhosted.org/packages/f1/60/48de987cd0e3707e8c9a3496b025aff470399fdbc61d17da1d1f2c998bfb/datatrove-0.0.1-py3-none-any.whl.metadata
  Downloading datatrove-0.0.1-py3-none-any.whl.metadata (21 kB)
Requirement already satisfied: dill>=0.3.0 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove) (0.3.8)
Requirement already satisfied: fsspec>=2023.12.2 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove) (2024.2.0)
Requirement already satisfied: huggingface-hub>=0.17.0 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove) (0.21.4)
Collecting humanize (from datatrove)
  Obtaining dependency information for humanize from https://files.pythonhosted.org/packages/aa/2b/2ae0c789fd08d5b44e745726d08a17e6d3d7d09071d05473105edc7615f2/humanize-4.9.0-py3-none-any.whl.metadata
  Downloading humanize-4.9.0-py3-none-any.whl.metadata (7.9 kB)
Collecting loguru>=0.7.0 (from datatrove)
  Obtaining dependency information for loguru>=0.7.0 from https://files.pythonhosted.org/packages/03/0a/4f6fed21aa246c6b49b561ca55facacc2a44b87d65b8b92362a8e99ba202/loguru-0.7.2-py3-none-any.whl.metadata
  Downloading loguru-0.7.2-py3-none-any.whl.metadata (23 kB)
Requirement already satisfied: multiprocess in ./.venvs/flair/lib/python3.11/site-packages (from datatrove) (0.70.16)
Requirement already satisfied: numpy>=1.25.0 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove) (1.26.3)
Requirement already satisfied: tqdm in ./.venvs/flair/lib/python3.11/site-packages (from datatrove) (4.66.2)
Requirement already satisfied: filelock in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove) (3.9.0)
Requirement already satisfied: requests in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove) (2.31.0)
Requirement already satisfied: pyyaml>=5.1 in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove) (6.0.1)
Requirement already satisfied: typing-extensions>=3.7.4.3 in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove) (4.8.0)
Requirement already satisfied: packaging>=20.9 in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove) (24.0)
Requirement already satisfied: charset-normalizer<4,>=2 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove) (3.3.2)
Requirement already satisfied: idna<4,>=2.5 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove) (3.6)
Requirement already satisfied: urllib3<3,>=1.21.1 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove) (1.26.18)
Requirement already satisfied: certifi>=2017.4.17 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove) (2024.2.2)
Downloading datatrove-0.0.1-py3-none-any.whl (16.6 MB)
   ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 16.6/16.6 MB 2.6 MB/s eta 0:00:00
Downloading loguru-0.7.2-py3-none-any.whl (62 kB)
   ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 62.5/62.5 kB 2.4 MB/s eta 0:00:00
Downloading humanize-4.9.0-py3-none-any.whl (126 kB)
   ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 126.8/126.8 kB 4.4 MB/s eta 0:00:00
Installing collected packages: loguru, humanize, datatrove
Successfully installed datatrove-0.0.1 humanize-4.9.0 loguru-0.7.2

I am not sure if this is a temporary problem, but I then installed datatrove from the latest git revision using:

(flair) stefan@:~$ pip3 install git+https://github.com/huggingface/datatrove.git
Collecting git+https://github.com/huggingface/datatrove.git
  Cloning https://github.com/huggingface/datatrove.git to /tmp/pip-req-build-pc7jpstg
  Running command git clone --filter=blob:none --quiet https://github.com/huggingface/datatrove.git /tmp/pip-req-build-pc7jpstg
  Resolved https://github.com/huggingface/datatrove.git to commit 447c942f51b524a19f74f15fa05da679e2135bf2
  Installing build dependencies ... done
  Getting requirements to build wheel ... done
  Installing backend dependencies ... done
  Preparing metadata (pyproject.toml) ... done
Requirement already satisfied: dill>=0.3.0 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (0.3.8)
Requirement already satisfied: fsspec>=2023.12.2 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (2024.2.0)
Requirement already satisfied: huggingface-hub>=0.17.0 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (0.21.4)
Requirement already satisfied: humanize in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (4.9.0)
Requirement already satisfied: loguru>=0.7.0 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (0.7.2)
Requirement already satisfied: multiprocess in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (0.70.16)
Requirement already satisfied: numpy>=1.25.0 in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (1.26.3)
Requirement already satisfied: tqdm in ./.venvs/flair/lib/python3.11/site-packages (from datatrove==0.0.1) (4.66.2)
Requirement already satisfied: filelock in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove==0.0.1) (3.9.0)
Requirement already satisfied: requests in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove==0.0.1) (2.31.0)
Requirement already satisfied: pyyaml>=5.1 in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove==0.0.1) (6.0.1)
Requirement already satisfied: typing-extensions>=3.7.4.3 in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove==0.0.1) (4.8.0)
Requirement already satisfied: packaging>=20.9 in ./.venvs/flair/lib/python3.11/site-packages (from huggingface-hub>=0.17.0->datatrove==0.0.1) (24.0)
Requirement already satisfied: charset-normalizer<4,>=2 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove==0.0.1) (3.3.2)
Requirement already satisfied: idna<4,>=2.5 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove==0.0.1) (3.6)
Requirement already satisfied: urllib3<3,>=1.21.1 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove==0.0.1) (1.26.18)
Requirement already satisfied: certifi>=2017.4.17 in ./.venvs/flair/lib/python3.11/site-packages (from requests->huggingface-hub>=0.17.0->datatrove==0.0.1) (2024.2.2)
Building wheels for collected packages: datatrove
  Building wheel for datatrove (pyproject.toml) ... done
  Created wheel for datatrove: filename=datatrove-0.0.1-py3-none-any.whl size=16646697 sha256=38578885844f8e92b67ff3bc34376644c8360e92c37590cc6a7dddf38e1fd726
  Stored in directory: /tmp/pip-ephem-wheel-cache-amoiy243/wheels/d1/c4/e1/c220bd9ceccf54ae8dfb14fc550f9a92dc794a0b54a0470b45
Successfully built datatrove
Installing collected packages: datatrove
Successfully installed datatrove-0.0.1

and it worked! So I am leaving this discussion open here, in case others also run into this problem with datatrove :)

HuggingFaceFW org

You are absolutely right, I will push the latest version to pypi soon :) thank you for the reminder

HuggingFaceFW org

We've just pushed v0.2.0 of datatrove to pypi, which should fix the issue :)

guipenedo changed discussion status to closed

Sign up or log in to comment