ProgramComputer committed on
Commit 8de7527
1 parent: 5693d9f

Update test.py

Files changed (1)
  1. test.py +65 -4
test.py CHANGED
@@ -112,7 +112,18 @@ _PLACEHOLDER_MAPS = dict(
     for urls in _URLS.values()
     for value in ((urls["placeholder"], urls["dev"]), (urls["test"], (urls["test"],)))
 )
+class NestedDataStructure:
+    def __init__(self, data=None):
+        self.data = data if data is not None else []
 
+    def flatten(self, data=None):
+        data = data if data is not None else self.data
+        if isinstance(data, dict):
+            return self.flatten(list(data.values()))
+        elif isinstance(data, (list, tuple)):
+            return [flattened for item in data for flattened in self.flatten(item)]
+        else:
+            return [data]
 
 def _mp_download(
     url,
@@ -190,7 +201,54 @@ class Test(datasets.GeneratorBasedBuilder):
         targets = (
             ["audio1", "audio2"] if self.config.name == "audio" else [self.config.name]
         )
-
+        def self_download_custom(self, url_or_urls, custom_download):
+            nonlocal dl_manager
+            """
+            Download given urls(s) by calling `custom_download`.
+
+            Args:
+                url_or_urls (`str` or `list` or `dict`):
+                    URL or `list` or `dict` of URLs to download and extract. Each URL is a `str`.
+                custom_download (`Callable[src_url, dst_path]`):
+                    The source URL and destination path. For example
+                    `tf.io.gfile.copy`, that lets you download from Google storage.
+
+            Returns:
+                downloaded_path(s): `str`, The downloaded paths matching the given input
+                `url_or_urls`.
+
+            Example:
+
+            ```py
+            >>> downloaded_files = dl_manager.download_custom('s3://my-bucket/data.zip', custom_download_for_my_private_bucket)
+            ```
+            """
+            cache_dir = dl_manager.download_config.cache_dir or config.DOWNLOADED_DATASETS_PATH
+            max_retries = dl_manager.download_config.max_retries
+
+            def url_to_downloaded_path(url):
+                return os.path.join(cache_dir, hash_url_to_filename(url))
+
+            downloaded_path_or_paths = map_nested(
+                url_to_downloaded_path, url_or_urls, disable_tqdm=not is_progress_bar_enabled()
+            )
+            url_or_urls = NestedDataStructure(url_or_urls)
+            downloaded_path_or_paths = NestedDataStructure(downloaded_path_or_paths)
+            for url, path in zip(url_or_urls.flatten(), downloaded_path_or_paths.flatten()):
+                try:
+                    get_from_cache(
+                        url, cache_dir=cache_dir, local_files_only=True, use_etag=False, max_retries=max_retries
+                    )
+                    cached = True
+                except FileNotFoundError:
+                    cached = False
+                if not cached or dl_manager.download_config.force_download:
+                    custom_download(url, path)
+                    get_from_cache(
+                        url, cache_dir=cache_dir, local_files_only=True, use_etag=False, max_retries=max_retries
+                    )
+            dl_manager._record_sizes_checksums(url_or_urls, downloaded_path_or_paths)
+            return downloaded_path_or_paths.data
 
         def download_custom(placeholder_url, path):
             nonlocal dl_manager
@@ -271,7 +329,7 @@ class Test(datasets.GeneratorBasedBuilder):
             )
         )
 
-        mapped_paths = dl_manager.download_and_extract(
+        mapped_paths = dl_manager.extract(self_download_custom(
 
             dict(
                 (
@@ -282,9 +340,12 @@ class Test(datasets.GeneratorBasedBuilder):
                 ),
             )
            for placeholder_key in ("placeholder", "test")
-        )
+            ),
+            download_custom
+
+
 
-        )
+        ))
 
         return [
             datasets.SplitGenerator(
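
For reference, the `NestedDataStructure` helper added in the first hunk flattens arbitrarily nested dicts, lists and tuples into a flat list of leaves; that is what lets the custom-download loop `zip` every URL against its target path. A minimal, self-contained sketch of that behaviour (the sample input below is illustrative, not the real `_URLS` mapping):

```py
# Copy of the flatten() logic from the diff above, exercised on a made-up
# nested mapping shaped roughly like one _URLS entry (illustrative only).
class NestedDataStructure:
    def __init__(self, data=None):
        self.data = data if data is not None else []

    def flatten(self, data=None):
        data = data if data is not None else self.data
        if isinstance(data, dict):
            return self.flatten(list(data.values()))
        elif isinstance(data, (list, tuple)):
            return [flattened for item in data for flattened in self.flatten(item)]
        else:
            return [data]


sample = {"dev": ("https://example.com/dev.zip",), "test": "https://example.com/test.zip"}
print(NestedDataStructure(sample).flatten())
# ['https://example.com/dev.zip', 'https://example.com/test.zip']
```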
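
The inlined `self_download_custom` follows a cache-first pattern: map each URL to a deterministic path under the cache directory, skip URLs that are already cached, and invoke the supplied callable only on misses (or when `force_download` is set). A hedged sketch of that control flow without the `datasets` internals (`hash_url_to_filename`, `map_nested`, `get_from_cache`); the cache directory and `fake_download` below are placeholders, not names from test.py:

```py
import hashlib
import os


def url_to_cached_path(url, cache_dir):
    # Deterministic per-URL filename, standing in for hash_url_to_filename().
    return os.path.join(cache_dir, hashlib.sha256(url.encode("utf-8")).hexdigest())


def download_if_missing(urls, cache_dir, custom_download, force_download=False):
    """Call custom_download(url, path) only for URLs not already in cache_dir."""
    os.makedirs(cache_dir, exist_ok=True)
    paths = []
    for url in urls:
        path = url_to_cached_path(url, cache_dir)
        if force_download or not os.path.exists(path):
            # Cache miss: delegate the actual transfer to the caller-supplied
            # callable, mirroring the `if not cached or ... force_download` branch.
            custom_download(url, path)
        paths.append(path)
    return paths


def fake_download(src_url, dst_path):
    # Placeholder for e.g. an authenticated fetch or tf.io.gfile.copy.
    with open(dst_path, "w", encoding="utf-8") as f:
        f.write(f"payload of {src_url}\n")


print(download_if_missing(["https://example.com/a.zip"], "/tmp/demo_cache", fake_download))
```

Pairing such a helper with `dl_manager.extract(...)`, as the last two hunks do in place of `download_and_extract`, keeps extraction on the standard `datasets` code path while the transfer itself goes through whatever callable is supplied.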