|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""SMVB dataset""" |
|
|
|
import sys |
|
import io |
|
import numpy as np |
|
if sys.version_info < (3, 9): |
|
from typing import Sequence, Generator, Tuple |
|
else: |
|
from collections.abc import Sequence, Generator |
|
Tuple = tuple |
|
|
|
from typing import Optional, IO |
|
|
|
import datasets |
|
import itertools |
|
from huggingface_hub import HfFileSystem |
|
|
|
|
|
|
|
|
|
_CITATION = """\ |
|
@INPROCEEDINGS{karoly2024synthetic, |
|
author={Károly, Artúr I. and Nádas, Imre and Galambos, Péter}, |
|
booktitle={2024 IEEE 22nd World Symposium on Applied Machine Intelligence and Informatics (SAMI)}, |
|
title={Synthetic Multimodal Video Benchmark (SMVB): Utilizing Blender for rich dataset generation}, |
|
year={2024}, |
|
volume={}, |
|
number={}, |
|
pages={}, |
|
doi={}} |
|
|
|
""" |
|
|
|
_DESCRIPTION = """\ |
|
Amultimodal video benchmark for evaluating models in multi-task learning scenarios. |
|
""" |
|
|
|
_HOMEPAGE = "https://huggingface.co/ABC-iRobotics/SMVB" |
|
|
|
_LICENSE = "GNU General Public License v3.0" |
|
|
|
_BASE_URL = "https://huggingface.co/" |
|
_REPO = "datasets/ABC-iRobotics/SMVB" |
|
_RESOURCE = "/resolve/main" |
|
|
|
_VERSION = "1.0.0" |
|
|
|
|
|
|
|
|
|
class SMVBDatasetConfig(datasets.BuilderConfig):
    """BuilderConfig for the SMVB dataset.

    On construction, lists every ``*.tar.gz`` archive in the Hub repo
    (network access required) and stores their resolvable download URLs
    in ``self._data_urls`` for the builder's split generator.
    """

    def __init__(self, name: str, version: Optional[str] = None, **kwargs):
        """Create a config and discover the dataset archives on the Hub.

        Args:
            name: Configuration name.
            version: Dataset version string. Falls back to the module-level
                ``_VERSION`` when omitted — the original passed ``None``
                straight into ``datasets.Version``, which raises.
            **kwargs: Forwarded to ``datasets.BuilderConfig``.
        """
        super().__init__(version=datasets.Version(version or _VERSION), name=name, **kwargs)
        # Enumerate all data archives in the repository (network call).
        fs = HfFileSystem()
        tarfiles = sorted(fs.glob(_REPO + "/**.tar.gz"))
        # Rewrite repo-relative paths into full "resolve/main" download URLs.
        self._data_urls = [p.replace(_REPO, _BASE_URL + _REPO + _RESOURCE) for p in tarfiles]

    @property
    def features(self):
        """Schema: two encoded images plus three flat float32 arrays."""
        return datasets.Features(
            {
                "image": datasets.Image(),
                "mask": datasets.Image(),
                "depth": datasets.Sequence(datasets.Value("float32")),
                "flow": datasets.Sequence(datasets.Value("float32")),
                "normal": datasets.Sequence(datasets.Value("float32")),
            }
        )

    @property
    def keys(self):
        # Feature names in the order the per-frame files are assumed to
        # appear inside each archive — TODO confirm against archive layout.
        return ("image", "mask", "depth", "flow", "normal")
|
|
|
|
|
|
|
|
|
|
|
class SMVBDataset(datasets.GeneratorBasedBuilder):
    """SMVB dataset builder: a single TRAIN split streamed from tar archives."""

    BUILDER_CONFIG_CLASS = SMVBDatasetConfig
    BUILDER_CONFIGS = [
        SMVBDatasetConfig(
            name="all",
            description="Synthetic data with rich annotations",
            version=_VERSION,
        ),
    ]
    # Each example carries large float arrays (depth/flow/normal); keep the
    # Arrow writer batches small to bound peak memory while writing.
    DEFAULT_WRITER_BATCH_SIZE = 10

    def _info(self):
        """Return dataset metadata assembled from the module constants and config."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=self.config.features,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
            version=self.config.version,
        )

    def _split_generators(self, dl_manager):
        """Download every archive and chain their members into one lazy stream."""
        local_data_paths = dl_manager.download(self.config._data_urls)
        # iter_archive yields (path-in-archive, file-object) pairs; chain the
        # archives back to back so _generate_examples sees one flat sequence.
        local_data_gen = itertools.chain.from_iterable(
            dl_manager.iter_archive(path) for path in local_data_paths
        )
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"data": local_data_gen},
            )
        ]

    def _generate_examples(
        self,
        data: Generator[Tuple[str, IO], None, None],
    ):
        """Yield ``(example_id, example_dict)`` pairs from the archive stream.

        Assumes members arrive in fixed groups of ``len(keys)`` files per
        frame, in the same order as ``self.config.keys``
        (image, mask, depth, flow, normal) — TODO confirm the archives are
        laid out this way.
        """
        keys = self.config.keys
        file_infos = []

        for i, (file_path, file_object) in enumerate(data):
            if i % len(keys) < 2:
                # First two members of a group are the encoded "image" and
                # "mask"; keep their raw bytes for the datasets.Image feature.
                file_infos.append((file_path, file_object.read()))
            else:
                # Remaining members are .npy arrays; flatten them to match
                # the flat Sequence(float32) feature declaration.
                file_infos.append(
                    (file_path, np.load(io.BytesIO(file_object.read())).flatten())
                )
            if (i + 1) % len(keys) == 0:
                # A full group has been collected — assemble one example.
                img_features_dict = {
                    k: {"path": d[0], "bytes": d[1]}
                    for k, d in zip(keys, file_infos)
                    if k in ("image", "mask")
                }
                array_features_dict = {
                    k: d[1]
                    for k, d in zip(keys, file_infos)
                    if k not in ("image", "mask")
                }
                data_dict = {**img_features_dict, **array_features_dict}
                # Fix: the original yielded (i // len(keys)) - 1, which
                # numbered the first example -1. Keys only need to be unique,
                # so it "worked", but a 0-based index is the intended value.
                yield (i + 1) // len(keys) - 1, data_dict
                file_infos = []