Error While Importing Dataset

#1
by snassimiha - opened

Hello, I attempted to load the dataset using:


from datasets import load_dataset
dataset = load_dataset("openclimatefix/eumetsat_uk_hrv")


But I got the following error. Do you know why that could be the case?


No config specified, defaulting to: eumetsat_uk_hrv/uk_osgb
Downloading and preparing dataset eumetsat_uk_hrv/uk_osgb to /Users/SeanNassimiha/.cache/huggingface/datasets/openclimatefix___eumetsat_uk_hrv/uk_osgb/1.2.0/f6f496ad775424fd53532a1127620eddd12ca301f2ab15e0074dc08dcdd60c3c...

ValueError Traceback (most recent call last)
File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:1204, in GeneratorBasedBuilder._prepare_split(self, split_generator, check_duplicate_keys)
1203 try:
-> 1204 for key, record in logging.tqdm(
1205 generator,
1206 unit=" examples",
1207 total=split_info.num_examples,
1208 leave=False,
1209 disable=not logging.is_progress_bar_enabled(),
1210 desc=f"Generating {split_info.name} split",
1211 ):
1212 example = self.info.features.encode_example(record)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/tqdm/notebook.py:258, in tqdm_notebook.__iter__(self)
257 it = super(tqdm_notebook, self).__iter__()
--> 258 for obj in it:
259 # return super(tqdm...) will not catch exception
260 yield obj

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/tqdm/std.py:1195, in tqdm.__iter__(self)
1194 try:
-> 1195 for obj in iterable:
1196 yield obj

File ~/.cache/huggingface/modules/datasets_modules/datasets/openclimatefix--eumetsat_uk_hrv/f6f496ad775424fd53532a1127620eddd12ca301f2ab15e0074dc08dcdd60c3c/eumetsat_uk_hrv.py:128, in EumetsatUkHrvDataset._generate_examples(self, filepath, time_range, split)
127 def _generate_examples(self, filepath, time_range, split):
--> 128 sat_data = xarray.open_dataset(filepath, engine="zarr", chunks='auto')
129 sat_data = sat_data.sel(time=time_range)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/xarray/backends/api.py:481, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, backend_kwargs, *args, **kwargs)
479 engine = plugins.guess_engine(filename_or_obj)
--> 481 backend = plugins.get_backend(engine)
483 decoders = _resolve_decoders_kwargs(
484 decode_cf,
485 open_backend_dataset_parameters=backend.open_dataset_parameters,
(...)
491 decode_coords=decode_coords,
492 )

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/xarray/backends/plugins.py:156, in get_backend(engine)
155 if engine not in engines:
--> 156 raise ValueError(
157 f"unrecognized engine {engine} must be one of: {list(engines)}"
158 )
159 backend = engines[engine]

ValueError: unrecognized engine zarr must be one of: ['netcdf4', 'h5netcdf', 'scipy', 'pydap', 'store']

During handling of the above exception, another exception occurred:

AttributeError Traceback (most recent call last)
Input In [2], in <cell line: 3>()
1 from datasets import load_dataset
----> 3 dataset = load_dataset("openclimatefix/eumetsat_uk_hrv")

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/load.py:1679, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, **config_kwargs)
1676 try_from_hf_gcs = path not in _PACKAGED_DATASETS_MODULES
1678 # Download and prepare data
-> 1679 builder_instance.download_and_prepare(
1680 download_config=download_config,
1681 download_mode=download_mode,
1682 ignore_verifications=ignore_verifications,
1683 try_from_hf_gcs=try_from_hf_gcs,
1684 use_auth_token=use_auth_token,
1685 )
1687 # Build dataset for splits
1688 keep_in_memory = (
1689 keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)
1690 )

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:704, in DatasetBuilder.download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)
702 logger.warning("HF google storage unreachable. Downloading and preparing it from source")
703 if not downloaded_from_gcs:
--> 704 self._download_and_prepare(
705 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs
706 )
707 # Sync info
708 self.info.dataset_size = sum(split.num_bytes for split in self.info.splits.values())

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:1221, in GeneratorBasedBuilder._download_and_prepare(self, dl_manager, verify_infos)
1220 def _download_and_prepare(self, dl_manager, verify_infos):
-> 1221 super()._download_and_prepare(dl_manager, verify_infos, check_duplicate_keys=verify_infos)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:793, in DatasetBuilder._download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)
789 split_dict.add(split_generator.split_info)
791 try:
792 # Prepare split will record examples associated to the split
--> 793 self._prepare_split(split_generator, **prepare_split_kwargs)
794 except OSError as e:
795 raise OSError(
796 "Cannot find data file. "
797 + (self.manual_download_instructions or "")
798 + "\nOriginal error:\n"
799 + str(e)
800 ) from None

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:1215, in GeneratorBasedBuilder._prepare_split(self, split_generator, check_duplicate_keys)
1213 writer.write(example, key)
1214 finally:
-> 1215 num_examples, num_bytes = writer.finalize()
1217 split_generator.split_info.num_examples = num_examples
1218 split_generator.split_info.num_bytes = num_bytes

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/arrow_writer.py:536, in ArrowWriter.finalize(self, close_stream)
534 if self.pa_writer is None:
535 if self.schema:
--> 536 self._build_writer(self.schema)
537 else:
538 raise ValueError("Please pass features or at least one example when writing data")

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/arrow_writer.py:352, in ArrowWriter._build_writer(self, inferred_schema)
350 def _build_writer(self, inferred_schema: pa.Schema):
351 schema = self.schema
--> 352 inferred_features = Features.from_arrow_schema(inferred_schema)
353 if self._features is not None:
354 if self.update_features: # keep original features it they match, or update them

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:1534, in Features.from_arrow_schema(cls, pa_schema)
1532 if "info" in metadata and "features" in metadata["info"] and metadata["info"]["features"] is not None:
1533 return Features.from_dict(metadata["info"]["features"])
-> 1534 obj = {field.name: generate_from_arrow_type(field.type) for field in pa_schema}
1535 return cls(**obj)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:1534, in <dictcomp>(.0)
1532 if "info" in metadata and "features" in metadata["info"] and metadata["info"]["features"] is not None:
1533 return Features.from_dict(metadata["info"]["features"])
-> 1534 obj = {field.name: generate_from_arrow_type(field.type) for field in pa_schema}
1535 return cls(**obj)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:1298, in generate_from_arrow_type(pa_type)
1296 raise NotImplementedError # TODO(thom) this will need access to the dictionary as well (for labels). I.e. to the py_table
1297 elif isinstance(pa_type, pa.DataType):
-> 1298 return Value(dtype=_arrow_to_datasets_dtype(pa_type))
1299 else:
1300 raise ValueError(f"Cannot convert {pa_type} to a Feature type.")

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:83, in _arrow_to_datasets_dtype(arrow_type)
81 return f"time32[{arrow_type.unit}]"
82 elif pyarrow.types.is_time64(arrow_type):
---> 83 return f"time64[{arrow_type.unit}]"
84 elif pyarrow.types.is_timestamp(arrow_type):
85 if arrow_type.tz is None:

AttributeError: 'pyarrow.lib.DataType' object has no attribute 'unit'

snassimiha changed discussion status to closed
snassimiha changed discussion status to open
Open Climate Fix org

Hi, I'm not sure. I just tried it again locally and it generates the examples fine. Do you have zarr installed? That might be the issue.

Open Climate Fix org

Oh wait, looking at the error again, it is definitely that zarr needs to be installed, which can be done with pip install zarr or conda install zarr; then it should work.
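
As a quick sanity check, xarray should pick up the zarr backend once the package is installed. A minimal sketch, assuming an xarray version with the same plugins module that appears in the traceback (the listing helper may differ in other versions):

# After `pip install zarr`, xarray discovers the backend through its
# plugin entry points, so the engine lookup from the traceback succeeds.
from xarray.backends.plugins import list_engines

print(list_engines())  # expect a 'zarr' entry alongside 'netcdf4', 'scipy', ...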

Hello, thank you for your response!
I have now installed zarr, and even though the error trace is different, the final error thrown is the same:

No config specified, defaulting to: eumetsat_uk_hrv/uk_osgb
Downloading and preparing dataset eumetsat_uk_hrv/uk_osgb to /Users/SeanNassimiha/.cache/huggingface/datasets/openclimatefix___eumetsat_uk_hrv/uk_osgb/1.2.0/966f542331e840babf9a3753833e705239ac8a836d328b043677ae7ff3e1d7e3...

ModuleNotFoundError Traceback (most recent call last)
File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/fsspec/registry.py:228, in get_filesystem_class(protocol)
227 try:
--> 228 register_implementation(protocol, _import_class(bit["class"]))
229 except ImportError as e:

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/fsspec/registry.py:251, in _import_class(cls, minv)
250 mod, name = cls.rsplit(".", 1)
--> 251 mod = importlib.import_module(mod)
252 return getattr(mod, name)

File ~/miniforge3/envs/preprocessing/lib/python3.10/importlib/__init__.py:126, in import_module(name, package)
125 level += 1
--> 126 return _bootstrap._gcd_import(name[level:], package, level)

File <frozen importlib._bootstrap>:1050, in _gcd_import(name, package, level)

File <frozen importlib._bootstrap>:1027, in _find_and_load(name, import_)

File <frozen importlib._bootstrap>:1004, in _find_and_load_unlocked(name, import_)

ModuleNotFoundError: No module named 'gcsfs'

The above exception was the direct cause of the following exception:

ImportError Traceback (most recent call last)
File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:1204, in GeneratorBasedBuilder._prepare_split(self, split_generator, check_duplicate_keys)
1203 try:
-> 1204 for key, record in logging.tqdm(
1205 generator,
1206 unit=" examples",
1207 total=split_info.num_examples,
1208 leave=False,
1209 disable=not logging.is_progress_bar_enabled(),
1210 desc=f"Generating {split_info.name} split",
1211 ):
1212 example = self.info.features.encode_example(record)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/tqdm/notebook.py:258, in tqdm_notebook.__iter__(self)
257 it = super(tqdm_notebook, self).__iter__()
--> 258 for obj in it:
259 # return super(tqdm...) will not catch exception
260 yield obj

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/tqdm/std.py:1195, in tqdm.__iter__(self)
1194 try:
-> 1195 for obj in iterable:
1196 yield obj

File ~/.cache/huggingface/modules/datasets_modules/datasets/openclimatefix--eumetsat_uk_hrv/966f542331e840babf9a3753833e705239ac8a836d328b043677ae7ff3e1d7e3/eumetsat_uk_hrv.py:129, in EumetsatUkHrvDataset._generate_examples(self, filepath, time_range, split)
128 def _generate_examples(self, filepath, time_range, split):
--> 129 sat_data = xarray.open_dataset(filepath, engine="zarr", chunks='auto')
130 sat_data = sat_data.sel(time=time_range)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/xarray/backends/api.py:495, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, backend_kwargs, *args, **kwargs)
494 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 495 backend_ds = backend.open_dataset(
496 filename_or_obj,
497 drop_variables=drop_variables,
498 **decoders,
499 **kwargs,
500 )
501 ds = _dataset_from_backend_dataset(
502 backend_ds,
503 filename_or_obj,
(...)
510 **kwargs,
511 )

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/xarray/backends/zarr.py:800, in ZarrBackendEntrypoint.open_dataset(self, filename_or_obj, mask_and_scale, decode_times, concat_characters, decode_coords, drop_variables, use_cftime, decode_timedelta, group, mode, synchronizer, consolidated, chunk_store, storage_options, stacklevel)
799 filename_or_obj = _normalize_path(filename_or_obj)
--> 800 store = ZarrStore.open_group(
801 filename_or_obj,
802 group=group,
803 mode=mode,
804 synchronizer=synchronizer,
805 consolidated=consolidated,
806 consolidate_on_close=False,
807 chunk_store=chunk_store,
808 storage_options=storage_options,
809 stacklevel=stacklevel + 1,
810 )
812 store_entrypoint = StoreBackendEntrypoint()

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/xarray/backends/zarr.py:348, in ZarrStore.open_group(cls, store, mode, synchronizer, group, consolidated, consolidate_on_close, chunk_store, storage_options, append_dim, write_region, safe_chunks, stacklevel)
347 try:
--> 348 zarr_group = zarr.open_consolidated(store, **open_kwargs)
349 except KeyError:

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/zarr/convenience.py:1283, in open_consolidated(store, metadata_key, mode, **kwargs)
1282 zarr_version = kwargs.get('zarr_version', None)
-> 1283 store = normalize_store_arg(store, storage_options=kwargs.get("storage_options"), mode=mode,
1284 zarr_version=zarr_version)
1285 if mode not in {'r', 'r+'}:

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/zarr/storage.py:170, in normalize_store_arg(store, storage_options, mode, zarr_version)
169 normalize_store = _normalize_store_arg_v3
--> 170 return normalize_store(store, storage_options, mode)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/zarr/storage.py:143, in _normalize_store_arg_v2(store, storage_options, mode)
142 if "://" in store or "::" in store:
--> 143 return FSStore(store, mode=mode, **(storage_options or {}))
144 elif storage_options:

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/zarr/storage.py:1321, in FSStore.__init__(self, url, normalize_keys, key_separator, mode, exceptions, dimension_separator, fs, check, create, missing_exceptions, **storage_options)
1320 storage_options["auto_mkdir"] = True
-> 1321 self.map = fsspec.get_mapper(url, **{**mapper_options, **storage_options})
1322 self.fs = self.map.fs # for direct operations

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/fsspec/mapping.py:230, in get_mapper(url, check, create, missing_exceptions, alternate_root, **kwargs)
229 # Removing protocol here - could defer to each open() on the backend
--> 230 fs, urlpath = url_to_fs(url, **kwargs)
231 root = alternate_root if alternate_root is not None else urlpath

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/fsspec/core.py:408, in url_to_fs(url, **kwargs)
407 protocol = split_protocol(url)[0]
--> 408 cls = get_filesystem_class(protocol)
410 options = cls._get_kwargs_from_urls(url)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/fsspec/registry.py:230, in get_filesystem_class(protocol)
229 except ImportError as e:
--> 230 raise ImportError(bit["err"]) from e
231 cls = registry[protocol]

ImportError: Please install gcsfs to access Google Storage

During handling of the above exception, another exception occurred:

AttributeError Traceback (most recent call last)
Input In [2], in <cell line: 3>()
1 from datasets import load_dataset
----> 3 dataset = load_dataset("openclimatefix/eumetsat_uk_hrv")

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/load.py:1679, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, ignore_verifications, keep_in_memory, save_infos, revision, use_auth_token, task, streaming, **config_kwargs)
1676 try_from_hf_gcs = path not in _PACKAGED_DATASETS_MODULES
1678 # Download and prepare data
-> 1679 builder_instance.download_and_prepare(
1680 download_config=download_config,
1681 download_mode=download_mode,
1682 ignore_verifications=ignore_verifications,
1683 try_from_hf_gcs=try_from_hf_gcs,
1684 use_auth_token=use_auth_token,
1685 )
1687 # Build dataset for splits
1688 keep_in_memory = (
1689 keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)
1690 )

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:704, in DatasetBuilder.download_and_prepare(self, download_config, download_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, **download_and_prepare_kwargs)
702 logger.warning("HF google storage unreachable. Downloading and preparing it from source")
703 if not downloaded_from_gcs:
--> 704 self._download_and_prepare(
705 dl_manager=dl_manager, verify_infos=verify_infos, **download_and_prepare_kwargs
706 )
707 # Sync info
708 self.info.dataset_size = sum(split.num_bytes for split in self.info.splits.values())

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:1221, in GeneratorBasedBuilder._download_and_prepare(self, dl_manager, verify_infos)
1220 def _download_and_prepare(self, dl_manager, verify_infos):
-> 1221 super()._download_and_prepare(dl_manager, verify_infos, check_duplicate_keys=verify_infos)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:793, in DatasetBuilder._download_and_prepare(self, dl_manager, verify_infos, **prepare_split_kwargs)
789 split_dict.add(split_generator.split_info)
791 try:
792 # Prepare split will record examples associated to the split
--> 793 self._prepare_split(split_generator, **prepare_split_kwargs)
794 except OSError as e:
795 raise OSError(
796 "Cannot find data file. "
797 + (self.manual_download_instructions or "")
798 + "\nOriginal error:\n"
799 + str(e)
800 ) from None

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/builder.py:1215, in GeneratorBasedBuilder._prepare_split(self, split_generator, check_duplicate_keys)
1213 writer.write(example, key)
1214 finally:
-> 1215 num_examples, num_bytes = writer.finalize()
1217 split_generator.split_info.num_examples = num_examples
1218 split_generator.split_info.num_bytes = num_bytes

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/arrow_writer.py:536, in ArrowWriter.finalize(self, close_stream)
534 if self.pa_writer is None:
535 if self.schema:
--> 536 self._build_writer(self.schema)
537 else:
538 raise ValueError("Please pass features or at least one example when writing data")

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/arrow_writer.py:352, in ArrowWriter._build_writer(self, inferred_schema)
350 def _build_writer(self, inferred_schema: pa.Schema):
351 schema = self.schema
--> 352 inferred_features = Features.from_arrow_schema(inferred_schema)
353 if self._features is not None:
354 if self.update_features: # keep original features it they match, or update them

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:1534, in Features.from_arrow_schema(cls, pa_schema)
1532 if "info" in metadata and "features" in metadata["info"] and metadata["info"]["features"] is not None:
1533 return Features.from_dict(metadata["info"]["features"])
-> 1534 obj = {field.name: generate_from_arrow_type(field.type) for field in pa_schema}
1535 return cls(**obj)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:1534, in <dictcomp>(.0)
1532 if "info" in metadata and "features" in metadata["info"] and metadata["info"]["features"] is not None:
1533 return Features.from_dict(metadata["info"]["features"])
-> 1534 obj = {field.name: generate_from_arrow_type(field.type) for field in pa_schema}
1535 return cls(**obj)

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:1298, in generate_from_arrow_type(pa_type)
1296 raise NotImplementedError # TODO(thom) this will need access to the dictionary as well (for labels). I.e. to the py_table
1297 elif isinstance(pa_type, pa.DataType):
-> 1298 return Value(dtype=_arrow_to_datasets_dtype(pa_type))
1299 else:
1300 raise ValueError(f"Cannot convert {pa_type} to a Feature type.")

File ~/miniforge3/envs/preprocessing/lib/python3.10/site-packages/datasets/features/features.py:83, in _arrow_to_datasets_dtype(arrow_type)
81 return f"time32[{arrow_type.unit}]"
82 elif pyarrow.types.is_time64(arrow_type):
---> 83 return f"time64[{arrow_type.unit}]"
84 elif pyarrow.types.is_timestamp(arrow_type):
85 if arrow_type.tz is None:

AttributeError: 'pyarrow.lib.DataType' object has no attribute 'unit'

Open Climate Fix org

Ah, you also need gcsfs, as the ModuleNotFoundError says; that's how it reads from Google Cloud Storage.
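
For context, the dataset script opens its zarr store directly from a gs:// URL, and fsspec resolves that protocol through gcsfs, which is why the missing module surfaces there. A minimal sketch of the same access pattern, with a placeholder bucket path rather than the dataset's real location:

# pip install zarr gcsfs
import xarray

# fsspec maps the "gs://" protocol to gcsfs; without gcsfs installed it
# raises the ImportError shown above. This mirrors the open_dataset call
# in eumetsat_uk_hrv.py's _generate_examples.
sat_data = xarray.open_dataset(
    "gs://example-bucket/example-store.zarr",  # placeholder URL
    engine="zarr",
    chunks="auto",  # lazy, dask-backed loading; requires dask to be installed
)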

Hi! Yes, you are right. I spotted that but forgot to follow up here.
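
For reference, with both zarr and gcsfs installed, the original snippet gets past these import errors:

# pip install datasets zarr gcsfs
from datasets import load_dataset

dataset = load_dataset("openclimatefix/eumetsat_uk_hrv")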

snassimiha changed discussion status to closed
