Dataset: jpodivin/plantorgans
Modalities: Image
Formats: parquet
Languages: English
Libraries: Datasets, Dask
jpodivin committed
Commit 8c398be (parent: f97d1c6)

Fixing mask archive path


Signed-off-by: Jiri Podivin <jpodivin@gmail.com>

metadata_semantic_test.csv CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:44dc86e46441a8ead49263facb19293912482a992d02d3a07754a8855994779e
-size 148053
+oid sha256:e142e80f70774bb055767f4484704f89dd7edf2dfc0db456ef4625cfe1949cf5
+size 148063
metadata_semantic_train.csv CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:4931badcc1fd59cc6d64bbecaba36b100205b7d25edbe456a8fd7395371d10eb
-size 591777
+oid sha256:cb2a941de28584d065e441b56bbc25aaa2870e66cf3d2c506c7d2c2b4a1a3250
+size 591787
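
Both files above are Git LFS pointers, so the diffs only show the updated oid/size; the regenerated CSV content sits behind the same /resolve/main/ URLs the loading script uses. A quick way to inspect it (a sketch, not part of the commit; it assumes the columns referenced later in plantorgans.py, image_path and mask_path, are present):

    import pandas as pd

    # The pointer file resolves to the real CSV through the /resolve/main/ URL.
    test_meta = pd.read_csv(
        "https://huggingface.co/datasets/jpodivin/plantorgans/resolve/main/metadata_semantic_test.csv"
    )
    print(test_meta.columns)  # expected to include 'image_path' and 'mask_path'
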
plantorgans.py CHANGED
@@ -16,7 +16,7 @@ _BASE_URL = "https://huggingface.co/datasets/jpodivin/plantorgans/resolve/main/"
 _TRAIN_URLS = [_BASE_URL + f"sourcedata_labeled.tar.{i:02}" for i in range(0, 8)]
 _TEST_URLS = [_BASE_URL + f"sourcedata_labeled.tar.{i:02}" for i in range(8, 12)]
 _MASKS_URLS = [_BASE_URL + f"masks.tar.0{i}" for i in range(0, 2)]
-_SEMANTIC_MASKS_URLS = [_BASE_URL + f"semantic_masks.tar.0{i}" for i in range(0, 2)]
+_SEMANTIC_MASKS_URLS = "semantic_masks.tar.gz"
 
 _SEMANTIC_METADATA_URLS = {
     'train': 'https://huggingface.co/datasets/jpodivin/plantorgans/resolve/main/metadata_semantic_train.csv',
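
This hunk is the core of the fix: the semantic masks now come from a single semantic_masks.tar.gz archive instead of two sharded parts, and the value is a bare file name rather than a _BASE_URL-prefixed URL (presumably resolved against the dataset repository by the datasets library). A sketch of the downstream effect, assuming the script hands this value to dl_manager.download_and_extract() as usual (the call site is not part of this diff):

    from datasets import DownloadManager

    # download_and_extract mirrors the type of its input: a list of shard URLs
    # returns a list of local paths, a single string returns one extracted directory.
    # The absolute URL form is used here purely for illustration.
    dl_manager = DownloadManager()
    base_url = "https://huggingface.co/datasets/jpodivin/plantorgans/resolve/main/"
    semantic_masks_dir = dl_manager.download_and_extract(base_url + "semantic_masks.tar.gz")
    print(semantic_masks_dir)  # one path, not a list
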
@@ -28,6 +28,7 @@ _PANOPTIC_METADATA_URLS = {
     'test': 'https://huggingface.co/datasets/jpodivin/plantorgans/resolve/main/metadata_test.csv'
 }
 
+
 class PlantOrgansConfig(datasets.BuilderConfig):
     """Builder Config for PlantOrgans"""
 
@@ -70,14 +71,7 @@ class PlantOrgans(datasets.GeneratorBasedBuilder):
             "image": datasets.Image(),
             "mask": datasets.Image(),
             "image_name": datasets.Value(dtype="string"),
-            "class": datasets.ClassLabel(
-                names=['Fruit', 'Leaf', 'Flower', 'Stem']),
         })
-        if self.config.name == 'instance_segmentation_full':
-            features['score'] = datasets.Value(dtype="double")
-        else:
-            features['class'] = datasets.ClassLabel(
-                names=['Fruit', 'Leaf', 'Flower', 'Stem'])
         return datasets.DatasetInfo(
             description=_DESCRIPTION,
             features=features,
@@ -104,7 +98,7 @@ class PlantOrgans(datasets.GeneratorBasedBuilder):
         if self.config.name == 'instance_segmentation_full':
             metadata_urls = _PANOPTIC_METADATA_URLS
             mask_urls = _MASKS_URLS
-            mask_glob = '/_masks/**.png'
+            mask_glob = '/masks/**.png'
         else:
             metadata_urls = _SEMANTIC_METADATA_URLS
             mask_urls = _SEMANTIC_MASKS_URLS
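
The glob for the panoptic masks drops the stray underscore so it matches the masks/ directory actually produced when the archive is extracted, which is what the commit title refers to. A hypothetical illustration of applying such a pattern (the code consuming mask_glob is not shown in this diff, and the placeholder path below is an assumption):

    from pathlib import Path

    # Stand-in for whatever dl_manager.download_and_extract() returned for the masks archive.
    masks_dir = Path("/tmp/extracted_masks_archive")
    # Collect every PNG under masks/, recursively.
    mask_paths = sorted(masks_dir.glob("masks/**/*.png"))
    print(len(mask_paths))
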
@@ -155,6 +149,8 @@ class PlantOrgans(datasets.GeneratorBasedBuilder):
 
         # Get all common about images and masks from csv
         metadata = pd.read_csv(metadata_path)
+        metadata['image'] = metadata['image_path'].apply(lambda x: str(Path(x).parts[-1]))
+        metadata['mask'] = metadata['mask_path'].apply(lambda x: str(Path(x).parts[-1]))
 
         # Merge dataframes
         metadata = metadata.merge(masks_paths, on='mask', how='inner')
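
The two added lines reduce the full paths stored in the CSV to bare file names, so the subsequent merge on 'mask' can match the file names discovered in the extracted archives. In isolation (the sample row below is made up for illustration):

    from pathlib import Path
    import pandas as pd

    metadata = pd.DataFrame({
        "image_path": ["sourcedata/plant_0001.jpg"],    # invented sample values
        "mask_path": ["semantic_masks/plant_0001.png"],
    })
    metadata["image"] = metadata["image_path"].apply(lambda x: str(Path(x).parts[-1]))
    metadata["mask"] = metadata["mask_path"].apply(lambda x: str(Path(x).parts[-1]))
    print(metadata["mask"].iloc[0])  # -> plant_0001.png
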
@@ -168,10 +164,5 @@ class PlantOrgans(datasets.GeneratorBasedBuilder):
                 'mask': r['mask_path'],
                 'image': r['image_path'],
                 'image_name': Path(r['image_path']).parts[-1],
-                'class': r['class']
             }
-            if self.config.name == 'instance_segmentation_full':
-                example['score'] = r['score']
-            else:
-                example['class'] = r['class']
             yield i, example
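
With the class/score fields dropped, every example now carries only 'image', 'mask', and 'image_name', matching the trimmed Features definition earlier in the file. Once the archive path is fixed, loading should work through the standard API; a sketch (the config name is taken from the script above, other config names are not visible in this diff, and recent datasets releases may require trust_remote_code for script-based datasets):

    from datasets import load_dataset

    ds = load_dataset(
        "jpodivin/plantorgans",
        "instance_segmentation_full",
        split="train",
        trust_remote_code=True,
    )
    print(ds[0]["image_name"])
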
 