import numpy as np

import datasets


class HyperPenguinPix(datasets.GeneratorBasedBuilder):
    """Builder for the HyperPenguinPix dataset: integer feature vectors with integer targets, stored as .npz archives."""

    def _info(self):
        return datasets.DatasetInfo(
            features=datasets.Features({
                # Each example is a 1-D sequence of int32 values plus a single int64 target.
                'feature': datasets.Sequence(datasets.Value('int32')),
                'target': datasets.Value('int64'),
            }),
            # The supervised keys must match the declared feature names.
            supervised_keys=('feature', 'target'),
            homepage='https://033labcodes.github.io/igrass24_penguin/',
        )

    def _split_generators(self, dl_manager):
        # Download (or locate) one .npz archive per split and pass its path to _generate_examples.
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={'file_path': dl_manager.download('data/train/data.npz')},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={'file_path': dl_manager.download('data/test/data.npz')},
            ),
            datasets.SplitGenerator(
                name=datasets.Split.VALIDATION,
                gen_kwargs={'file_path': dl_manager.download('data/validation/data.npz')},
            ),
        ]

    def _generate_examples(self, file_path):
        # Each .npz archive is expected to contain a 'features' array and a 'targets' array of equal length.
        data = np.load(file_path)
        features = data['features']
        targets = data['targets']
        for idx in range(len(features)):
            yield idx, {
                'feature': features[idx],
                'target': targets[idx],
            }
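
# A minimal usage sketch, assuming this script is saved locally (the filename
# 'hyper_penguin_pix.py' is illustrative, not part of this file) next to the
# data/ directory, and that the installed datasets release still supports
# script-based loading:
#
#   from datasets import load_dataset
#
#   ds = load_dataset('hyper_penguin_pix.py')
#   example = ds['train'][0]
#   print(len(example['feature']), example['target'])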