import json
import os

import datasets
from PIL import Image

_CITATION = """\
@SIA86{huggingface:dataset,
title = {WaterFlowCountersRecognition dataset},
author={SIA86},
year={2023}
}
"""

_DESCRIPTION = """\
This dataset is designed to detect digital data from water flow counters photos.
"""

_HOMEPAGE = "https://github.com/SIA86/WaterFlowRecognition"

# Region (object) class names used as ClassLabel categories.
_REGION_NAME = ['value_a', 'value_b', 'serial']
# Possible photo rotations, in degrees. (Name keeps the original spelling;
# currently unused by the generator — rotation decoding was never enabled.)
_REGION_ROTETION = ['0', '90', '180', '270']


class WaterFlowCounterConfig(datasets.BuilderConfig):
    """Builder Config for WaterFlowCounter"""

    def __init__(self, data_url, metadata_url, **kwargs):
        """BuilderConfig for WaterFlowCounter.

        Args:
            data_url: `string`, url to download the photos.
            metadata_url: instance segmentation regions and description.
            **kwargs: keyword arguments forwarded to super.
        """
        super(WaterFlowCounterConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
        self.data_url = data_url
        self.metadata_url = metadata_url


class WaterFlowCounter(datasets.GeneratorBasedBuilder):
    """WaterFlowCounter Images dataset"""

    BUILDER_CONFIGS = [
        WaterFlowCounterConfig(
            name="WFCR_full",
            description="Full dataset which contains coordinates and names of regions and information about rotation",
            data_url={
                "train": "data/train_photos.zip",
                "test": "data/test_photos.zip",
            },
            metadata_url={
                'full': "data/WaterFlowCounter.json"
            },
        )
    ]

    def _info(self):
        """Return the DatasetInfo: per-image record with a sequence of objects
        (bounding box, area, class label)."""
        features = datasets.Features(
            {
                "image_id": datasets.Value("int64"),
                "image": datasets.Image(),
                "width": datasets.Value("int32"),
                "height": datasets.Value("int32"),
                "objects": datasets.Sequence(
                    {
                        "id": datasets.Value("int64"),
                        "area": datasets.Value("int64"),
                        # [x_min, y_min, width, height] in pixels.
                        "bbox": datasets.Sequence(datasets.Value("float32"), length=4),
                        "category": datasets.ClassLabel(names=_REGION_NAME),
                    }
                ),
            }
        )
        return datasets.DatasetInfo(
            # Fix: _DESCRIPTION was defined but never passed through.
            description=_DESCRIPTION,
            features=features,
            homepage=_HOMEPAGE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download/extract the photo archives and the shared VIA metadata
        file, and wire them into train/test split generators."""
        data_files = dl_manager.download_and_extract(self.config.data_url)
        meta_file = dl_manager.download(self.config.metadata_url)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "folder_dir": data_files["train"],
                    # Both splits share the single annotation file.
                    "metadata_path": meta_file['full'],
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                gen_kwargs={
                    "folder_dir": data_files["test"],
                    "metadata_path": meta_file['full'],
                },
            ),
        ]

    def _generate_examples(self, folder_dir, metadata_path):
        """Yield (key, example) pairs for every photo in *folder_dir*.

        Annotations come from a VGG Image Annotator (VIA) JSON export:
        ``_via_img_metadata`` maps entry keys to ``{"filename", "regions"}``,
        where each region holds polygon points and a class-name attribute.

        Args:
            folder_dir: directory with the extracted photo files.
            metadata_path: path to the VIA JSON annotation file.
        """
        name_to_id = {name: indx for indx, name in enumerate(_REGION_NAME)}

        with open(metadata_path, "r", encoding='utf-8') as f:
            annotations = json.load(f)

        # Index annotation entries by filename once, instead of re-scanning
        # the whole metadata dict for every image (was O(files * entries)).
        # A list preserves the original handling of duplicate filenames.
        entries_by_filename = {}
        for entry in annotations['_via_img_metadata'].values():
            entries_by_filename.setdefault(entry['filename'], []).append(entry)

        idx = 0
        object_id = 0  # globally increasing per-region id (was `id`, shadowing the builtin)
        for file in os.listdir(folder_dir):
            filepath = os.path.join(folder_dir, file)
            with open(filepath, "rb") as f:
                image_bytes = f.read()
            # Context manager so PIL's file handle is released promptly
            # (the original leaked one handle per image).
            with Image.open(filepath) as image:
                width, height = image.size

            all_bbox = []
            all_area = []
            names = []
            ids = []
            for entry in entries_by_filename.get(file, []):
                for region in entry['regions']:
                    ids.append(object_id)
                    object_id += 1
                    # Polygon -> axis-aligned bounding box.
                    all_x = region['shape_attributes']['all_points_x']
                    all_y = region['shape_attributes']['all_points_y']
                    x_min = min(all_x)
                    y_min = min(all_y)
                    p_width = max(all_x) - x_min
                    p_height = max(all_y) - y_min
                    all_bbox.append([x_min, y_min, p_width, p_height])
                    all_area.append(p_width * p_height)
                    # VIA stores the class as a dict of checked options;
                    # its keys are the selected names.
                    for name in region['region_attributes']['name']:
                        names.append(name_to_id[name])
                    # NOTE(review): dropping the *last* three labels looks
                    # suspicious (names[:3] may have been intended) — kept
                    # byte-identical to the original; TODO confirm intent.
                    if len(names) > 3:
                        names = names[:-3]

            yield idx, {
                "image_id": idx,
                "image": {"path": filepath, "bytes": image_bytes},
                "width": width,
                "height": height,
                "objects": {
                    "id": ids,
                    "area": all_area,
                    "bbox": all_bbox,
                    "category": names,
                },
            }
            idx += 1