sayakpaul committed
Commit 5e193c3
Parent: 12c54ce

loading of multiple shards (#5)

- feat: dataloader with multiple shards. (592761d9480e93d5df79d8b3313c7ab6803756ca)
- chore: change to relative paths. (a918863adf9c4f13aaa06d6daf6f11e158bdad69)
- chore: add readme generated from dataset-cli test. (0e7dfb0bff5480c59af5366131dc4f6b0bd71b08)
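
Taken together, these commits swap the single upstream tar.gz download for sharded tar archives stored in the repo (data/train-*.tar, data/val-*.tar) that the loader reads directly. A minimal usage sketch, assuming the dataset lives at the repo id sayakpaul/nyu_depth_v2 (inferred from the commit context, not stated here) and a datasets release that supports streaming script-based loaders:

    from datasets import load_dataset

    # Streaming reads examples straight out of the tar shards instead of
    # downloading all ~35 GB up front (repo id assumed from the commit context).
    ds = load_dataset("sayakpaul/nyu_depth_v2", split="train", streaming=True)

    sample = next(iter(ds))
    print(sample["image"].size)      # RGB frame, decoded to a PIL.Image
    print(sample["depth_map"].size)  # depth map, also decoded to a PIL.Image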

Files changed (2):
  1. README.md +16 -1
  2. nyu_depth_v2.py +25 -46
README.md CHANGED
@@ -1,3 +1,18 @@
  ---
  license: apache-2.0
- ---
+ dataset_info:
+   features:
+   - name: image
+     dtype: image
+   - name: depth_map
+     dtype: image
+   splits:
+   - name: train
+     num_bytes: 20212097551
+     num_examples: 47584
+   - name: validation
+     num_bytes: 240785762
+     num_examples: 654
+   download_size: 35151124480
+   dataset_size: 20452883313
+ ---
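
The dataset_info block above is the machine-generated metadata the third commit refers to ("readme generated from dataset-cli test"). As a sanity check, the same numbers should be visible through the library without downloading anything; a sketch, again assuming the sayakpaul/nyu_depth_v2 repo id:

    from datasets import load_dataset_builder

    builder = load_dataset_builder("sayakpaul/nyu_depth_v2")
    print(builder.info.features)                           # image and depth_map, both Image()
    print(builder.info.splits["train"].num_examples)       # 47584
    print(builder.info.splits["validation"].num_examples)  # 654
    print(builder.info.download_size)                      # 35151124480 bytes, ~35 GB of shards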
nyu_depth_v2.py CHANGED
@@ -14,7 +14,7 @@
  """NYU-Depth V2."""


- import os
+ import io

  import datasets
  import h5py
@@ -44,9 +44,8 @@ _HOMEPAGE = "https://cs.nyu.edu/~silberman/datasets/nyu_depth_v2.html"
  _LICENSE = "Apace 2.0 License"

  _URLS = {
-     "depth_estimation": {
-         "train/val": "http://datasets.lids.mit.edu/fastdepth/data/nyudepthv2.tar.gz",
-     }
+     "train": [f"data/train-{i:06d}.tar" for i in range(12)],
+     "val": [f"data/val-{i:06d}.tar" for i in range(2)],
  }

  _IMG_EXTENSIONS = [".h5"]
@@ -57,16 +56,6 @@ class NYUDepthV2(datasets.GeneratorBasedBuilder):

      VERSION = datasets.Version("1.0.0")

-     BUILDER_CONFIGS = [
-         datasets.BuilderConfig(
-             name="depth_estimation",
-             version=VERSION,
-             description="The depth estimation variant.",
-         ),
-     ]
-
-     DEFAULT_CONFIG_NAME = "depth_estimation"
-
      def _info(self):
          features = datasets.Features(
              {"image": datasets.Image(), "depth_map": datasets.Image()}
@@ -83,52 +72,42 @@ class NYUDepthV2(datasets.GeneratorBasedBuilder):
          # Reference: https://github.com/dwofk/fast-depth/blob/master/dataloaders/dataloader.py#L21-L23
          return any(filename.endswith(extension) for extension in _IMG_EXTENSIONS)

-     def _get_file_paths(self, dir):
-         # Reference: https://github.com/dwofk/fast-depth/blob/master/dataloaders/dataloader.py#L31-L44
-         file_paths = []
-         dir = os.path.expanduser(dir)
-
-         for target in sorted(os.listdir(dir)):
-             d = os.path.join(dir, target)
-             if not os.path.isdir(d):
-                 continue
-             for root, _, fnames in sorted(os.walk(d)):
-                 for fname in sorted(fnames):
-                     if self._is_image_file(fname):
-                         path = os.path.join(root, fname)
-                         file_paths.append(path)
-
-         return file_paths
-
-     def _h5_loader(self, path):
+     def _h5_loader(self, bytes_stream):
          # Reference: https://github.com/dwofk/fast-depth/blob/master/dataloaders/dataloader.py#L8-L13
-         h5f = h5py.File(path, "r")
+         f = io.BytesIO(bytes_stream)
+         h5f = h5py.File(f, "r")
          rgb = np.array(h5f["rgb"])
          rgb = np.transpose(rgb, (1, 2, 0))
          depth = np.array(h5f["depth"])
          return rgb, depth

      def _split_generators(self, dl_manager):
-         urls = _URLS[self.config.name]
-         base_path = dl_manager.download_and_extract(urls)["train/val"]
-
-         train_data_files = self._get_file_paths(
-             os.path.join(base_path, "nyudepthv2", "train")
-         )
-         val_data_files = self._get_file_paths(os.path.join(base_path, "nyudepthv2", "val"))
+         archives = dl_manager.download(_URLS)

          return [
              datasets.SplitGenerator(
                  name=datasets.Split.TRAIN,
-                 gen_kwargs={"filepaths": train_data_files},
+                 gen_kwargs={
+                     "archives": [
+                         dl_manager.iter_archive(archive) for archive in archives["train"]
+                     ]
+                 },
              ),
              datasets.SplitGenerator(
                  name=datasets.Split.VALIDATION,
-                 gen_kwargs={"filepaths": val_data_files},
+                 gen_kwargs={
+                     "archives": [
+                         dl_manager.iter_archive(archive) for archive in archives["val"]
+                     ]
+                 },
              ),
          ]

-     def _generate_examples(self, filepaths):
-         for idx, filepath in enumerate(filepaths):
-             image, depth = self._h5_loader(filepath)
-             yield idx, {"image": image, "depth_map": depth}
+     def _generate_examples(self, archives):
+         idx = 0
+         for archive in archives:
+             for path, file in archive:
+                 if self._is_image_file(path):
+                     image, depth = self._h5_loader(file.read())
+                     yield idx, {"image": image, "depth_map": depth}
+                     idx += 1
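
The reworked loader never extracts the shards to disk: dl_manager.iter_archive yields (path, file object) pairs straight from each tar, and _h5_loader decodes the member bytes in memory, which works because h5py.File accepts any file-like object. A standalone sketch of that decoding step; sample.h5 is a hypothetical file laid out like the fast-depth archives ("rgb" stored channel-first, "depth" as a 2-D array):

    import io

    import h5py
    import numpy as np

    # Stand-in for file.read() on a tar member yielded by iter_archive.
    with open("sample.h5", "rb") as fp:
        raw = fp.read()

    h5f = h5py.File(io.BytesIO(raw), "r")  # h5py reads from file-like objects
    rgb = np.transpose(np.array(h5f["rgb"]), (1, 2, 0))  # CHW -> HWC for PIL
    depth = np.array(h5f["depth"])  # per-pixel depth, shape (H, W)
    print(rgb.shape, depth.shape)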