cannot download

#7
by Kurapika993 - opened

I cannot download the dataset with the following command:
from datasets import load_dataset

dataset = load_dataset("daily_dialog")

I get the following error:
FileNotFoundError: Unable to find 'hf://datasets/daily_dialog@469b7118f52336674f1d78693797e9fcc7e47d2a/default/train/0000.parquet' with any supported extension ['.csv', '.tsv', '.json', '.jsonl', '.parquet', '.geoparquet', '.gpq', '.arrow', '.txt', '.tar', '.blp', '.bmp', '.dib', '.bufr', '.cur', '.pcx', '.dcx', '.dds', '.ps', '.eps', '.fit', '.fits', '.fli', '.flc', '.ftc', '.ftu', '.gbr', '.gif', '.grib', '.h5', '.hdf', '.png', '.apng', '.jp2', '.j2k', '.jpc', '.jpf', '.jpx', '.j2c', '.icns', '.ico', '.im', '.iim', '.tif', '.tiff', '.jfif', '.jpe', '.jpg', '.jpeg', '.mpg', '.mpeg', '.msp', '.pcd', '.pxr', '.pbm', '.pgm', '.ppm', '.pnm', '.psd', '.bw', '.rgb', '.rgba', '.sgi', '.ras', '.tga', '.icb', '.vda', '.vst', '.webp', '.wmf', '.emf', '.xbm', '.xpm', '.BLP', '.BMP', '.DIB', '.BUFR', '.CUR', '.PCX', '.DCX', '.DDS', '.PS', '.EPS', '.FIT', '.FITS', '.FLI', '.FLC', '.FTC', '.FTU', '.GBR', '.GIF', '.GRIB', '.H5', '.HDF', '.PNG', '.APNG', '.JP2', '.J2K', '.JPC', '.JPF', '.JPX', '.J2C', '.ICNS', '.ICO', '.IM', '.IIM', '.TIF', '.TIFF', '.JFIF', '.JPE', '.JPG', '.JPEG', '.MPG', '.MPEG', '.MSP', '.PCD', '.PXR', '.PBM', '.PGM', '.PPM', '.PNM', '.PSD', '.BW', '.RGB', '.RGBA', '.SGI', '.RAS', '.TGA', '.ICB', '.VDA', '.VST', '.WEBP', '.WMF', '.EMF', '.XBM', '.XPM', '.aiff', '.au', '.avr', '.caf', '.flac', '.htk', '.svx', '.mat4', '.mat5', '.mpc2k', '.ogg', '.paf', '.pvf', '.raw', '.rf64', '.sd2', '.sds', '.ircam', '.voc', '.w64', '.wav', '.nist', '.wavex', '.wve', '.xi', '.mp3', '.opus', '.AIFF', '.AU', '.AVR', '.CAF', '.FLAC', '.HTK', '.SVX', '.MAT4', '.MAT5', '.MPC2K', '.OGG', '.PAF', '.PVF', '.RAW', '.RF64', '.SD2', '.SDS', '.IRCAM', '.VOC', '.W64', '.WAV', '.NIST', '.WAVEX', '.WVE', '.XI', '.MP3', '.OPUS', '.zip']


Datasets Maintainers org

Possibly due to an outdated version of datasets. Can you try again after upgrading it with pip install --upgrade datasets?
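For reference, a minimal sketch to confirm the upgrade actually took effect in the running environment (the kernel-restart step is an assumption about a typical notebook session, not something stated in this thread):

import datasets

# Restart the kernel/interpreter first, otherwise the previously imported
# version may still be in use for this session.
print(datasets.__version__)  # should show the newly installed version

from datasets import load_dataset
dataset = load_dataset("daily_dialog")  # retry the original call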

It is still there; here is the full error:

FileNotFoundError Traceback (most recent call last)
Cell In[1], line 3
1 from datasets import load_dataset
----> 3 dataset = load_dataset("daily_dialog")

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/load.py:2556, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, ignore_verifications, keep_in_memory, save_infos, revision, token, use_auth_token, task, streaming, num_proc, storage_options, trust_remote_code, **config_kwargs)
2551 verification_mode = VerificationMode(
2552 (verification_mode or VerificationMode.BASIC_CHECKS) if not save_infos else VerificationMode.ALL_CHECKS
2553 )
2555 # Create a dataset builder
-> 2556 builder_instance = load_dataset_builder(
2557 path=path,
2558 name=name,
2559 data_dir=data_dir,
2560 data_files=data_files,
2561 cache_dir=cache_dir,
2562 features=features,
2563 download_config=download_config,
2564 download_mode=download_mode,
2565 revision=revision,
2566 token=token,
2567 storage_options=storage_options,
2568 trust_remote_code=trust_remote_code,
2569 _require_default_config_name=name is None,
2570 **config_kwargs,
2571 )
2573 # Return iterable dataset in case of streaming
2574 if streaming:

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/load.py:2265, in load_dataset_builder(path, name, data_dir, data_files, cache_dir, features, download_config, download_mode, revision, token, use_auth_token, storage_options, trust_remote_code, _require_default_config_name, **config_kwargs)
2263 builder_cls = get_dataset_builder_class(dataset_module, dataset_name=dataset_name)
2264 # Instantiate the dataset builder
-> 2265 builder_instance: DatasetBuilder = builder_cls(
2266 cache_dir=cache_dir,
2267 dataset_name=dataset_name,
2268 config_name=config_name,
2269 data_dir=data_dir,
2270 data_files=data_files,
2271 hash=dataset_module.hash,
2272 info=info,
2273 features=features,
2274 token=token,
2275 storage_options=storage_options,
2276 **builder_kwargs,
2277 **config_kwargs,
2278 )
2279 builder_instance._use_legacy_cache_dir_if_possible(dataset_module)
2281 return builder_instance

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/builder.py:371, in DatasetBuilder.__init__(self, cache_dir, dataset_name, config_name, hash, base_path, info, features, token, use_auth_token, repo_id, data_files, data_dir, storage_options, writer_batch_size, name, **config_kwargs)
369 if data_dir is not None:
370 config_kwargs["data_dir"] = data_dir
--> 371 self.config, self.config_id = self._create_builder_config(
372 config_name=config_name,
373 custom_features=features,
374 **config_kwargs,
375 )
377 # prepare info: DatasetInfo are a standardized dataclass across all datasets
378 # Prefill datasetinfo
379 if info is None:
380 # TODO FOR PACKAGED MODULES IT IMPORTS DATA FROM src/packaged_modules which doesn't make sense

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/builder.py:620, in DatasetBuilder._create_builder_config(self, config_name, custom_features, **config_kwargs)
617 raise ValueError(f"BuilderConfig must have a name, got {builder_config.name}")
619 # resolve data files if needed
--> 620 builder_config._resolve_data_files(
621 base_path=self.base_path,
622 download_config=DownloadConfig(token=self.token, storage_options=self.storage_options),
623 )
625 # compute the config id that is going to be used for caching
626 config_id = builder_config.create_config_id(
627 config_kwargs,
628 custom_features=custom_features,
629 )

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/builder.py:211, in BuilderConfig._resolve_data_files(self, base_path, download_config)
209 if isinstance(self.data_files, DataFilesPatternsDict):
210 base_path = xjoin(base_path, self.data_dir) if self.data_dir else base_path
--> 211 self.data_files = self.data_files.resolve(base_path, download_config)

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/data_files.py:799, in DataFilesPatternsDict.resolve(self, base_path, download_config)
797 out = DataFilesDict()
798 for key, data_files_patterns_list in self.items():
--> 799 out[key] = data_files_patterns_list.resolve(base_path, download_config)
800 return out

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/data_files.py:752, in DataFilesPatternsList.resolve(self, base_path, download_config)
749 for pattern, allowed_extensions in zip(self, self.allowed_extensions):
750 try:
751 data_files.extend(
--> 752 resolve_pattern(
753 pattern,
754 base_path=base_path,
755 allowed_extensions=allowed_extensions,
756 download_config=download_config,
757 )
758 )
759 except FileNotFoundError:
760 if not has_magic(pattern):

File ~/Environment/berttopic/lib/python3.10/site-packages/datasets/data_files.py:393, in resolve_pattern(pattern, base_path, allowed_extensions, download_config)
391 if allowed_extensions is not None:
392 error_msg += f" with any supported extension {list(allowed_extensions)}"
--> 393 raise FileNotFoundError(error_msg)
394 return out

FileNotFoundError: Unable to find 'hf://datasets/daily_dialog@469b7118f52336674f1d78693797e9fcc7e47d2a/default/train/0000.parquet' with any supported extension ['.csv', '.tsv', '.json', '.jsonl', '.parquet', '.geoparquet', '.gpq', '.arrow', '.txt', '.tar', '.blp', '.bmp', '.dib', '.bufr', '.cur', '.pcx', '.dcx', '.dds', '.ps', '.eps', '.fit', '.fits', '.fli', '.flc', '.ftc', '.ftu', '.gbr', '.gif', '.grib', '.h5', '.hdf', '.png', '.apng', '.jp2', '.j2k', '.jpc', '.jpf', '.jpx', '.j2c', '.icns', '.ico', '.im', '.iim', '.tif', '.tiff', '.jfif', '.jpe', '.jpg', '.jpeg', '.mpg', '.mpeg', '.msp', '.pcd', '.pxr', '.pbm', '.pgm', '.ppm', '.pnm', '.psd', '.bw', '.rgb', '.rgba', '.sgi', '.ras', '.tga', '.icb', '.vda', '.vst', '.webp', '.wmf', '.emf', '.xbm', '.xpm', '.BLP', '.BMP', '.DIB', '.BUFR', '.CUR', '.PCX', '.DCX', '.DDS', '.PS', '.EPS', '.FIT', '.FITS', '.FLI', '.FLC', '.FTC', '.FTU', '.GBR', '.GIF', '.GRIB', '.H5', '.HDF', '.PNG', '.APNG', '.JP2', '.J2K', '.JPC', '.JPF', '.JPX', '.J2C', '.ICNS', '.ICO', '.IM', '.IIM', '.TIF', '.TIFF', '.JFIF', '.JPE', '.JPG', '.JPEG', '.MPG', '.MPEG', '.MSP', '.PCD', '.PXR', '.PBM', '.PGM', '.PPM', '.PNM', '.PSD', '.BW', '.RGB', '.RGBA', '.SGI', '.RAS', '.TGA', '.ICB', '.VDA', '.VST', '.WEBP', '.WMF', '.EMF', '.XBM', '.XPM', '.aiff', '.au', '.avr', '.caf', '.flac', '.htk', '.svx', '.mat4', '.mat5', '.mpc2k', '.ogg', '.paf', '.pvf', '.raw', '.rf64', '.sd2', '.sds', '.ircam', '.voc', '.w64', '.wav', '.nist', '.wavex', '.wve', '.xi', '.mp3', '.opus', '.AIFF', '.AU', '.AVR', '.CAF', '.FLAC', '.HTK', '.SVX', '.MAT4', '.MAT5', '.MPC2K', '.OGG', '.PAF', '.PVF', '.RAW', '.RF64', '.SD2', '.SDS', '.IRCAM', '.VOC', '.W64', '.WAV', '.NIST', '.WAVEX', '.WVE', '.XI', '.MP3', '.OPUS', '.zip']


It works with an older version of datasets for me, but not with the latest one.
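As a stopgap, a hedged sketch of pinning the previously working release (the exact version is not stated in this thread; the number below is only a hypothetical placeholder):

# Hypothetical workaround: install the last datasets release that worked for you.
# Run in a shell first (the version number is a placeholder, not confirmed here):
#   pip install "datasets==2.14.6"
from datasets import load_dataset

# With the older release installed, the original call should resolve as before.
dataset = load_dataset("daily_dialog")
print(dataset)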

Kurapika993 changed discussion status to closed
