Dataset loading issue (audio + text for audio fine-tuning of large language models)

#1 opened by pedramaa

The dataset viewer is not working.

Error details:

Error code: UnexpectedError

I have uploaded my audio files along with a corresponding metadata.csv for the train and test splits; the layout is sketched below.
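For reference, this follows the AudioFolder convention with a metadata.csv per split. The clip names and the transcription column below are placeholders; `file_name` is the one column the loader requires:

```
arabic-llm-egyption/
├── train/
│   ├── metadata.csv      # columns: file_name, transcription
│   ├── clip_0001.wav
│   └── ...
└── test/
    ├── metadata.csv
    └── ...
```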
When I load the files in a notebook, it fails with:

```

----> 5 ar_egyptian["train"] = load_dataset("pedramaa/arabic-llm-egyption", split="train+test", token=True)
6 #ar_egyptian["test"] = load_dataset("pedramaa/arabic-llm-egyption", split="test", token=True)
8 print(ar_egyptian)

File ~\anaconda3\Lib\site-packages\datasets\load.py:2133, in load_dataset(path, name, data_dir, data_files, split, cache_dir, features, download_config, download_mode, verification_mode, ignore_verifications, keep_in_memory, save_infos, revision, token, use_auth_token, task, streaming, num_proc, storage_options, **config_kwargs)
2130 try_from_hf_gcs = path not in _PACKAGED_DATASETS_MODULES
2132 # Download and prepare data
-> 2133 builder_instance.download_and_prepare(
2134 download_config=download_config,
2135 download_mode=download_mode,
2136 verification_mode=verification_mode,
2137 try_from_hf_gcs=try_from_hf_gcs,
2138 num_proc=num_proc,
2139 storage_options=storage_options,
2140 )
2142 # Build dataset for splits
2143 keep_in_memory = (
2144 keep_in_memory if keep_in_memory is not None else is_small_dataset(builder_instance.info.dataset_size)
2145 )

File ~\anaconda3\Lib\site-packages\datasets\builder.py:954, in DatasetBuilder.download_and_prepare(self, output_dir, download_config, download_mode, verification_mode, ignore_verifications, try_from_hf_gcs, dl_manager, base_path, use_auth_token, file_format, max_shard_size, num_proc, storage_options, **download_and_prepare_kwargs)
952 if num_proc is not None:
953 prepare_split_kwargs["num_proc"] = num_proc
--> 954 self._download_and_prepare(
955 dl_manager=dl_manager,
956 verification_mode=verification_mode,
957 **prepare_split_kwargs,
958 **download_and_prepare_kwargs,
959 )
960 # Sync info
961 self.info.dataset_size = sum(split.num_bytes for split in self.info.splits.values())

File ~\anaconda3\Lib\site-packages\datasets\builder.py:1717, in GeneratorBasedBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs)
1716 def _download_and_prepare(self, dl_manager, verification_mode, **prepare_splits_kwargs):
-> 1717 super()._download_and_prepare(
1718 dl_manager,
1719 verification_mode,
1720 check_duplicate_keys=verification_mode == VerificationMode.BASIC_CHECKS
1721 or verification_mode == VerificationMode.ALL_CHECKS,
1722 **prepare_splits_kwargs,
1723 )

File ~\anaconda3\Lib\site-packages\datasets\builder.py:1027, in DatasetBuilder._download_and_prepare(self, dl_manager, verification_mode, **prepare_split_kwargs)
1025 split_dict = SplitDict(dataset_name=self.dataset_name)
1026 split_generators_kwargs = self._make_split_generators_kwargs(prepare_split_kwargs)
-> 1027 split_generators = self._split_generators(dl_manager, **split_generators_kwargs)
1029 # Checksums verification
1030 if verification_mode == VerificationMode.ALL_CHECKS and dl_manager.record_checksums:

File ~\anaconda3\Lib\site-packages\datasets\packaged_modules\folder_based_builder\folder_based_builder.py:175, in FolderBasedBuilder._split_generators(self, dl_manager)
172 metadata_ext = metadata_ext.pop()
174 for _, downloaded_metadata_file in itertools.chain.from_iterable(metadata_files.values()):
--> 175 pa_metadata_table = self._read_metadata(downloaded_metadata_file)
176 features_per_metadata_file.append(
177 (downloaded_metadata_file, datasets.Features.from_arrow_schema(pa_metadata_table.schema))
178 )
179 for downloaded_metadata_file, metadata_features in features_per_metadata_file:

File ~\anaconda3\Lib\site-packages\datasets\packaged_modules\folder_based_builder\folder_based_builder.py:246, in FolderBasedBuilder._read_metadata(self, metadata_file)
244 else:
245 with open(metadata_file, "rb") as f:
--> 246 return paj.read_json(f)

File ~\anaconda3\Lib\site-packages\pyarrow\_json.pyx:259, in pyarrow._json.read_json()

File ~\anaconda3\Lib\site-packages\pyarrow\error.pxi:144, in pyarrow.lib.pyarrow_internal_check_status()

File ~\anaconda3\Lib\site-packages\pyarrow\error.pxi:100, in pyarrow.lib.check_status()

ArrowInvalid: JSON parse error: Invalid value. in row 0

```
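The bottom of the trace shows the packaged `folder_based_builder` handing the downloaded metadata file to pyarrow's JSON reader (`paj.read_json`), which assumes UTF-8 input, so a file saved in another encoding (e.g. UTF-16 with a BOM) can fail on the very first byte with `Invalid value. in row 0`. A quick sanity check (the path below is an example):

```python
# Peek at the raw bytes of the metadata file to spot a BOM or odd encoding.
# b'\xef\xbb\xbf' -> UTF-8 BOM, b'\xff\xfe' -> UTF-16 LE, b'\xfe\xff' -> UTF-16 BE.
with open("train/metadata.csv", "rb") as f:
    print(f.read(4))
```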

cc @albertvillanova @lhoestq @severo.

We fixed the issue (it was due to the file's encoding; pyarrow defaults to UTF-8).
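For anyone hitting the same error: a minimal sketch of re-saving a metadata file as UTF-8 before re-uploading, assuming the original was UTF-16 (swap in whatever encoding your file actually uses):

```python
# Read with the detected source encoding, then rewrite as plain UTF-8.
with open("train/metadata.csv", "r", encoding="utf-16") as src:  # "utf-16" is an assumption
    text = src.read()
with open("train/metadata.csv", "w", encoding="utf-8", newline="") as dst:
    dst.write(text)
```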

lhoestq changed discussion status to closed
