PermissionError: [WinError 32] The process cannot access the file because it is being used by another process
#3 · opened by Chaoses-Ib
from datasets import load_dataset
data = load_dataset('HuggingFaceM4/Caltech-101', 'without_background_category')
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
File d:\venv\lib\site-packages\datasets\builder.py:1744, in GeneratorBasedBuilder._prepare_split_single(self, gen_kwargs, fpath, file_format, max_shard_size, split_info, check_duplicate_keys, job_id)
1743 example = self.info.features.encode_example(record) if self.info.features is not None else record
-> 1744 writer.write(example, key)
1745 num_examples_progress_update += 1
File d:\venv\lib\site-packages\datasets\arrow_writer.py:499, in ArrowWriter.write(self, example, key, writer_batch_size)
497 self.hkey_record = []
--> 499 self.write_examples_on_file()
File d:\venv\lib\site-packages\datasets\arrow_writer.py:431, in ArrowWriter.write_examples_on_file(self)
430 # preserve the order the columns
--> 431 if self.schema:
432 schema_cols = set(self.schema.names)
File d:\venv\lib\site-packages\datasets\arrow_writer.py:410, in ArrowWriter.schema(self)
405 @property
406 def schema(self):
407 _schema = (
408 self._schema
409 if self._schema is not None
--> 410 else (pa.schema(self._features.type) if self._features is not None else None)
411 )
412 if self._disable_nullable and _schema is not None:
File d:\venv\lib\site-packages\datasets\features\features.py:1645, in Features.type(self)
1639 """
1640 Features field types.
1641
1642 Returns:
1643 :obj:`pyarrow.DataType`
1644 """
-> 1645 return get_nested_type(self)
File d:\venv\lib\site-packages\datasets\features\features.py:1210, in get_nested_type(schema)
1208 if isinstance(schema, Features):
1209 return pa.struct(
-> 1210 {key: get_nested_type(schema[key]) for key in schema}
1211 ) # Features is subclass of dict, and dict order is deterministic since Python 3.6
1212 elif isinstance(schema, dict):
File d:\venv\lib\site-packages\datasets\features\features.py:1210, in <dictcomp>(.0)
1208 if isinstance(schema, Features):
1209 return pa.struct(
-> 1210 {key: get_nested_type(schema[key]) for key in schema}
1211 ) # Features is subclass of dict, and dict order is deterministic since Python 3.6
1212 elif isinstance(schema, dict):
File d:\venv\lib\site-packages\datasets\features\features.py:1214, in get_nested_type(schema)
1212 elif isinstance(schema, dict):
1213 return pa.struct(
-> 1214 {key: get_nested_type(schema[key]) for key in schema}
1215 ) # however don't sort on struct types since the order matters
1216 elif isinstance(schema, (list, tuple)):
File d:\venv\lib\site-packages\datasets\features\features.py:1214, in <dictcomp>(.0)
1212 elif isinstance(schema, dict):
1213 return pa.struct(
-> 1214 {key: get_nested_type(schema[key]) for key in schema}
1215 ) # however don't sort on struct types since the order matters
1216 elif isinstance(schema, (list, tuple)):
File d:\venv\lib\site-packages\datasets\features\features.py:1229, in get_nested_type(schema)
1228 # Other objects are callable which returns their data type (ClassLabel, Array2D, Translation, Arrow datatype creation methods)
-> 1229 return schema()
File d:\venv\lib\site-packages\datasets\features\features.py:529, in _ArrayXD.__call__(self)
528 def __call__(self):
--> 529 pa_type = globals()[self.__class__.__name__ + "ExtensionType"](self.shape, self.dtype)
530 return pa_type
File d:\venv\lib\site-packages\datasets\features\features.py:646, in _ArrayXDExtensionType.__init__(self, shape, dtype)
645 if shape[dim] is None:
--> 646 raise ValueError(f"Support only dynamic size on first dimension. Got: {shape}")
647 self.shape = tuple(shape)
ValueError: Support only dynamic size on first dimension. Got: (2, None)
During handling of the above exception, another exception occurred:
ValueError Traceback (most recent call last)
File d:\venv\lib\site-packages\datasets\builder.py:1753, in GeneratorBasedBuilder._prepare_split_single(self, gen_kwargs, fpath, file_format, max_shard_size, split_info, check_duplicate_keys, job_id)
1752 num_shards = shard_id + 1
-> 1753 num_examples, num_bytes = writer.finalize()
1754 writer.close()
File d:\venv\lib\site-packages\datasets\arrow_writer.py:598, in ArrowWriter.finalize(self, close_stream)
597 self.hkey_record = []
--> 598 self.write_examples_on_file()
599 # If schema is known, infer features even if no examples were written
File d:\venv\lib\site-packages\datasets\arrow_writer.py:431, in ArrowWriter.write_examples_on_file(self)
430 # preserve the order the columns
--> 431 if self.schema:
432 schema_cols = set(self.schema.names)
File d:\venv\lib\site-packages\datasets\arrow_writer.py:410, in ArrowWriter.schema(self)
405 @property
406 def schema(self):
407 _schema = (
408 self._schema
409 if self._schema is not None
--> 410 else (pa.schema(self._features.type) if self._features is not None else None)
411 )
412 if self._disable_nullable and _schema is not None:
File d:\venv\lib\site-packages\datasets\features\features.py:1645, in Features.type(self)
1639 """
1640 Features field types.
1641
1642 Returns:
1643 :obj:`pyarrow.DataType`
1644 """
-> 1645 return get_nested_type(self)
File d:\venv\lib\site-packages\datasets\features\features.py:1210, in get_nested_type(schema)
1208 if isinstance(schema, Features):
1209 return pa.struct(
-> 1210 {key: get_nested_type(schema[key]) for key in schema}
1211 ) # Features is subclass of dict, and dict order is deterministic since Python 3.6
1212 elif isinstance(schema, dict):
File d:\venv\lib\site-packages\datasets\features\features.py:1210, in <dictcomp>(.0)
1208 if isinstance(schema, Features):
1209 return pa.struct(
-> 1210 {key: get_nested_type(schema[key]) for key in schema}
1211 ) # Features is subclass of dict, and dict order is deterministic since Python 3.6
1212 elif isinstance(schema, dict):
File d:\venv\lib\site-packages\datasets\features\features.py:1214, in get_nested_type(schema)
1212 elif isinstance(schema, dict):
1213 return pa.struct(
-> 1214 {key: get_nested_type(schema[key]) for key in schema}
1215 ) # however don't sort on struct types since the order matters
1216 elif isinstance(schema, (list, tuple)):
File d:\venv\lib\site-packages\datasets\features\features.py:1214, in <dictcomp>(.0)
1212 elif isinstance(schema, dict):
1213 return pa.struct(
-> 1214 {key: get_nested_type(schema[key]) for key in schema}
1215 ) # however don't sort on struct types since the order matters
1216 elif isinstance(schema, (list, tuple)):
File d:\venv\lib\site-packages\datasets\features\features.py:1229, in get_nested_type(schema)
1228 # Other objects are callable which returns their data type (ClassLabel, Array2D, Translation, Arrow datatype creation methods)
-> 1229 return schema()
File d:\venv\lib\site-packages\datasets\features\features.py:529, in _ArrayXD.__call__(self)
528 def __call__(self):
--> 529 pa_type = globals()[self.__class__.__name__ + "ExtensionType"](self.shape, self.dtype)
530 return pa_type
File d:\venv\lib\site-packages\datasets\features\features.py:646, in _ArrayXDExtensionType.__init__(self, shape, dtype)
645 if shape[dim] is None:
--> 646 raise ValueError(f"Support only dynamic size on first dimension. Got: {shape}")
647 self.shape = tuple(shape)
ValueError: Support only dynamic size on first dimension. Got: (2, None)
The above exception was the direct cause of the following exception:
DatasetGenerationError Traceback (most recent call last)
File d:\venv\lib\site-packages\datasets\builder.py:959, in DatasetBuilder.download_and_prepare.<locals>.incomplete_dir(dirname)
958 try:
--> 959 yield tmp_dir
960 if os.path.isdir(dirname):
File d:\venv\lib\site-packages\datasets\builder.py:1005, in DatasetBuilder.download_and_prepare(self, output_dir, download_config, download_mode, verification_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs)
1004 prepare_split_kwargs["num_proc"] = num_proc
-> 1005 self._download_and_prepare(
1006 dl_manager=dl_manager,
1007 verification_mode=verification_mode,
1008 **prepare_split_kwargs,
1009 **download_and_prepare_kwargs,
1010 )
1011 # Sync info
File d:\venv\lib\site-packages\datasets\builder.py:1767, in GeneratorBasedBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs)
1766 def _download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs):
-> 1767 super()._download_and_prepare(
1768 dl_manager,
1769 verification_mode,
1770 check_duplicate_keys=verification_mode == VerificationMode.BASIC_CHECKS
1771 or verification_mode == VerificationMode.ALL_CHECKS,
1772 **prepare_splits_kwargs,
1773 )
File d:\venv\lib\site-packages\datasets\builder.py:1100, in DatasetBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_split_kwargs)
1098 try:
1099 # Prepare split will record examples associated to the split
-> 1100 self._prepare_split(split_generator, **prepare_split_kwargs)
1101 except OSError as e:
File d:\venv\lib\site-packages\datasets\builder.py:1605, in GeneratorBasedBuilder._prepare_split(self, split_generator, check_duplicate_keys, file_format, num_proc, max_shard_size)
1604 with pbar:
-> 1605 for job_id, done, content in self._prepare_split_single(
1606 gen_kwargs=gen_kwargs, job_id=job_id, **_prepare_split_args
1607 ):
1608 if done:
File d:\venv\lib\site-packages\datasets\builder.py:1762, in GeneratorBasedBuilder._prepare_split_single(self, gen_kwargs, fpath, file_format, max_shard_size, split_info, check_duplicate_keys, job_id)
1761 e = e.__context__
-> 1762 raise DatasetGenerationError("An error occurred while generating the dataset") from e
1764 yield job_id, True, (total_num_examples, total_num_bytes, writer._features, num_shards, shard_lengths)
DatasetGenerationError: An error occurred while generating the dataset
During handling of the above exception, another exception occurred:
PermissionError Traceback (most recent call last)
Cell In[3], line 1
----> 1 data = load_dataset('HuggingFaceM4/Caltech-101', 'without_background_category')
File d:\venv\lib\site-packages\datasets\load.py:2582, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, ignore_verifications, keep_in_memory, save_infos, revision, token, use_auth_token, task, streaming, num_proc, storage_options, trust_remote_code, **config_kwargs)
2579 try_from_hf_gcs = path not in _PACKAGED_DATASETS_MODULES
2581 # Download and prepare data
-> 2582 builder_instance.download_and_prepare(
2583 download_config=download_config,
2584 download_mode=download_mode,
2585 verification_mode=verification_mode,
2586 try_from_hf_gcs=try_from_hf_gcs,
2587 num_proc=num_proc,
2588 storage_options=storage_options,
2589 )
2591 # Build dataset for splits
2592 keep_in_memory = (
2593 keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)
2594 )
File d:\venv\lib\site-packages\datasets\builder.py:985, in DatasetBuilder.download_and_prepare(self, output_dir, download_config, download_mode, verification_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs)
982 self._check_manual_download(dl_manager)
984 # Create a tmp dir and rename to self._output_dir on successful exit.
--> 985 with incomplete_dir(self._output_dir) as tmp_output_dir:
986 # Temporarily assign _output_dir to tmp_data_dir to avoid having to forward
987 # it to every sub function.
988 with temporary_assignment(self, "_output_dir", tmp_output_dir):
989 # Try to download the already prepared dataset files
990 downloaded_from_gcs = False
File C:\ProgramData\Miniconda3\envs\py310\lib\contextlib.py:153, in _GeneratorContextManager.__exit__(self, typ, value, traceback)
151 value = typ()
152 try:
--> 153 self.gen.throw(typ, value, traceback)
154 except StopIteration as exc:
155 # Suppress StopIteration *unless* it's the same exception that
156 # was passed to throw(). This prevents a StopIteration
157 # raised inside the "with" statement from being suppressed.
158 return exc is not value
File d:\venv\lib\site-packages\datasets\builder.py:966, in DatasetBuilder.download_and_prepare.<locals>.incomplete_dir(dirname)
964 finally:
965 if os.path.exists(tmp_dir):
--> 966 shutil.rmtree(tmp_dir)
File C:\ProgramData\Miniconda3\envs\py310\lib\shutil.py:750, in rmtree(path, ignore_errors, onerror)
748 # can't continue even if onerror hook returns
749 return
--> 750 return _rmtree_unsafe(path, onerror)
File C:\ProgramData\Miniconda3\envs\py310\lib\shutil.py:620, in _rmtree_unsafe(path, onerror)
618 os.unlink(fullname)
619 except OSError:
--> 620 onerror(os.unlink, fullname, sys.exc_info())
621 try:
622 os.rmdir(path)
File C:\ProgramData\Miniconda3\envs\py310\lib\shutil.py:618, in _rmtree_unsafe(path, onerror)
616 else:
617 try:
--> 618 os.unlink(fullname)
619 except OSError:
620 onerror(os.unlink, fullname, sys.exc_info())
PermissionError: [WinError 32] The process cannot access the file because it is being used by another process: 'C:/Users/Chaoses/.cache/huggingface/datasets/HuggingFaceM4___caltech-101/without_background_category/1.0.0/de531bad1446ca30ea620aad43702100b105c0fc5dd44fac9f87567389186803.incomplete\\caltech-101-train-00000-00000-of-NNNNN.arrow'
Hi @Chaoses-Ib,

It looks like this is a cache concurrency problem: per the final PermissionError, another process is still holding the incomplete Arrow file open, so the cleanup step (shutil.rmtree on the .incomplete directory) fails with WinError 32. Can you close any other process using the dataset, wipe the specific cache folder (C:/Users/Chaoses/.cache/huggingface/datasets/HuggingFaceM4___caltech-101/), and try again?
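For reference, a minimal sketch of that cleanup in Python, assuming the cache path from the traceback above (adjust it if HF_HOME or HF_DATASETS_CACHE points elsewhere):

import shutil
from pathlib import Path

from datasets import load_dataset

# Cache folder taken from the traceback above; adjust if your
# Hugging Face cache lives elsewhere.
cache_dir = Path('C:/Users/Chaoses/.cache/huggingface/datasets/HuggingFaceM4___caltech-101')

# Make sure no other Python process (another notebook kernel, a previous
# run) still has the .arrow files open before deleting, or rmtree will
# hit the same WinError 32.
if cache_dir.exists():
    shutil.rmtree(cache_dir)

# Retry the load from scratch.
data = load_dataset('HuggingFaceM4/Caltech-101', 'without_background_category')

If the lock is held by your own interpreter (e.g. a Jupyter kernel that already ran a partial preparation), restarting the kernel before deleting usually releases it. Passing download_mode='force_redownload' to load_dataset is an alternative way to discard the cached attempt, but it won't help while another process still holds the file lock.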