#
# This file is part of the SynWBM distribution (https://huggingface.co/datasets/ABC-iRobotics/SynWBM).
# Copyright (c) 2023 ABC-iRobotics.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""SynWBM dataset"""
import sys
if sys.version_info < (3, 9):
from typing import Sequence, Generator, Tuple
else:
from collections.abc import Sequence, Generator
Tuple = tuple
from typing import Optional, IO
import datasets
import itertools
# ---- Constants ----
# Citation text for the dataset; placeholder until a publication is available.
_CITATION = """\
COMING SOON
"""
# Human-readable summary used in the generated DatasetInfo.
_DESCRIPTION = """\
A synthetic instance segmentation dataset for white button mushrooms (Agaricus bisporus).
The dataset incorporates rendered and generated synthetic images for training mushroom segmentation models.
"""
# Dataset homepage and license, surfaced through DatasetInfo below.
_HOMEPAGE = "https://huggingface.co/datasets/ABC-iRobotics/SynWBM"
_LICENSE = "GNU General Public License v3.0"
# Most recent release per config name; used when no explicit version is requested.
_LATEST_VERSIONS = {
"all": "1.0.0",
"blender": "1.0.0",
"sdxl": "1.0.0",
}
# Root URL for resolving raw files from the Hugging Face dataset repository.
BASE_URL = "https://huggingface.co/datasets/ABC-iRobotics/SynWBM/resolve/main/"
# ---- SynWBM dataset Configs ----
class SynWBMDatasetConfig(datasets.BuilderConfig):
    """BuilderConfig for SynWBM dataset."""

    def __init__(self, name: str, base_urls: Sequence[str], images_txt: str, version: Optional[str] = None, **kwargs):
        """Build a config and precompute image/depth/mask download URLs.

        Args:
            name: Config name; also keys into ``_LATEST_VERSIONS``.
            base_urls: Base URLs each listed image file name is appended to.
            images_txt: Path to a text file with one image file name per line.
            version: Explicit version string; defaults to the latest for ``name``.
            **kwargs: Forwarded to ``datasets.BuilderConfig``.
        """
        resolved_version = version if version is not None else _LATEST_VERSIONS[name]
        super().__init__(version=datasets.Version(resolved_version), name=name, **kwargs)

        with open(images_txt, 'r') as f:
            names = [line.strip() for line in f]

        # Image archives may live under several base URLs; depth and mask
        # archives always resolve under the repo-level "depths/" and "masks/"
        # folders (one entry per base URL to stay aligned with the images).
        self._imgs_urls = [base + n for base in base_urls for n in names]
        self._depth_urls = [BASE_URL + "depths/" + n for _ in base_urls for n in names]
        self._masks_urls = [BASE_URL + "masks/" + n for _ in base_urls for n in names]

    @property
    def features(self):
        """Feature schema: an input image plus depth and mask images."""
        return datasets.Features({
            "image": datasets.Image(),
            "depth": datasets.Image(),
            "mask": datasets.Image(),
        })

    @property
    def supervised_keys(self):
        """No (input, target) pairing is declared for this dataset."""
        return None
# ---- SynWBM dataset Loader ----
class SynWBMDataset(datasets.GeneratorBasedBuilder):
    """SynWBM dataset."""

    BUILDER_CONFIG_CLASS = SynWBMDatasetConfig
    BUILDER_CONFIGS = [
        SynWBMDatasetConfig(
            name="all",
            description="All images",
            base_urls=[
                BASE_URL + "rendered/",
                BASE_URL + "generated/",
            ],
            images_txt="images.txt",
        ),
        SynWBMDatasetConfig(
            name="blender",
            description="Synthetic images rendered using Blender",
            base_urls=[
                BASE_URL + "rendered/",
            ],
            images_txt="images.txt",
        ),
        SynWBMDatasetConfig(
            name="sdxl",
            description="Synthetic images generated by Stable Diffusion XL",
            base_urls=[
                BASE_URL + "generated/",
            ],
            images_txt="images.txt",
        ),
    ]
    # Small batches: each example carries full image bytes, so keep writes light.
    DEFAULT_WRITER_BATCH_SIZE = 10

    def _info(self):
        """Assemble the DatasetInfo from module constants and the active config."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=self.config.features,
            supervised_keys=self.config.supervised_keys,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            version=self.config.version,
        )

    def _split_generators(self, dl_manager):
        """Download all archives and expose a single TRAIN split.

        Each URL list is downloaded up front; the resulting archives are then
        chained into one lazy (path, file-object) stream per modality.
        """
        img_paths = dl_manager.download(self.config._imgs_urls)
        depth_paths = dl_manager.download(self.config._depth_urls)
        mask_paths = dl_manager.download(self.config._masks_urls)

        def _chained(paths):
            # Flatten the members of several downloaded archives into one iterator.
            return itertools.chain.from_iterable(dl_manager.iter_archive(p) for p in paths)

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "images": _chained(img_paths),
                    "depths": _chained(depth_paths),
                    "masks": _chained(mask_paths),
                },
            ),
        ]

    def _generate_examples(
        self,
        images: Generator[Tuple[str, IO], None, None],
        depths: Generator[Tuple[str, IO], None, None],
        masks: Generator[Tuple[str, IO], None, None],
    ):
        """Yield (key, example) pairs by walking the three streams in lockstep.

        The image, depth, and mask archives are assumed to list files in the
        same order, so position ``i`` of each stream describes one example.
        """
        triples = zip(images, depths, masks)
        for key, ((img_path, img_f), (depth_path, depth_f), (mask_path, mask_f)) in enumerate(triples):
            example = {
                "image": {"path": img_path, "bytes": img_f.read()},
                "depth": {"path": depth_path, "bytes": depth_f.read()},
                "mask": {"path": mask_path, "bytes": mask_f.read()},
            }
            # Release the archive members once their bytes are captured.
            img_f.close()
            depth_f.close()
            mask_f.close()
            yield key, example