keremberke committed on
Commit
2c0f4e8
1 Parent(s): 862488b

dataset uploaded by roboflow2huggingface package

Browse files
README.md CHANGED
@@ -3,10 +3,13 @@ task_categories:
3
  - object-detection
4
  tags:
5
  - roboflow
 
 
6
  ---
7
 
8
- ### Roboflow Dataset Page
9
- [https://universe.roboflow.com/mohamed-traore-2ekkp/forklift-dsitv/dataset/1](https://universe.roboflow.com/mohamed-traore-2ekkp/forklift-dsitv/dataset/1?ref=roboflow2huggingface)
 
10
 
11
  ### Dataset Labels
12
 
@@ -14,6 +17,34 @@ tags:
14
  ['forklift', 'person']
15
  ```
16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  ### Citation
18
 
19
  ```
@@ -27,7 +58,7 @@ tags:
27
  publisher = { Roboflow },
28
  year = { 2022 },
29
  month = { mar },
30
- note = { visited on 2023-01-01 },
31
  }
32
  ```
33
 
 
3
  - object-detection
4
  tags:
5
  - roboflow
6
+ - roboflow2huggingface
7
+ - Manufacturing
8
  ---
9
 
10
+ <div align="center">
11
+ <img width="640" alt="keremberke/forklift-object-detection" src="https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/thumbnail.jpg">
12
+ </div>
13
 
14
  ### Dataset Labels
15
 
 
17
  ['forklift', 'person']
18
  ```
19
 
20
+
21
+ ### Number of Images
22
+
23
+ ```json
24
+ {"test": 42, "valid": 84, "train": 295}
25
+ ```
26
+
27
+
28
+ ### How to Use
29
+
30
+ - Install [datasets](https://pypi.org/project/datasets/):
31
+
32
+ ```bash
33
+ pip install datasets
34
+ ```
35
+
36
+ - Load the dataset:
37
+
38
+ ```python
39
+ from datasets import load_dataset
40
+
41
+ ds = load_dataset("keremberke/forklift-object-detection", name="full")
42
+ example = ds['train'][0]
43
+ ```
44
+
45
+ ### Roboflow Dataset Page
46
+ [https://universe.roboflow.com/mohamed-traore-2ekkp/forklift-dsitv/dataset/1](https://universe.roboflow.com/mohamed-traore-2ekkp/forklift-dsitv/dataset/1?ref=roboflow2huggingface)
47
+
48
  ### Citation
49
 
50
  ```
 
58
  publisher = { Roboflow },
59
  year = { 2022 },
60
  month = { mar },
61
+ note = { visited on 2023-01-15 },
62
  }
63
  ```
64
 
data/test.zip CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d47399ff43cca765cc4c9c5293ce0d6fe8d14f4658e39506bdce2fdadff31003
3
  size 2771676
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9baace711b989e84c731fa3b3e2b45f8a636880d475c2c2d6b91b8cdff803cbe
3
  size 2771676
data/train.zip CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:fb46061800b2175f2a1e5aa2c3136af198c41dfd1ef514f5979b92715e9c307d
3
  size 13533922
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf5c3364b39160d4d212a454b9a7689f8b89e81eab3ea3da50031c4e7bf92fe0
3
  size 13533922
data/valid-mini.zip ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:94d605e3645a7bd47a91aae22a14cce0b17d40b2c9483aed5ad30be756e0c038
3
+ size 105141
data/valid.zip CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c98ee46aba21d732e1b9b7a7faa64d0910410f9020fa135454dc78087b35ca15
3
  size 3774165
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0f8013a437dc85e8f218b252ce8cffa32b2724c8021cf6b983febd3a9145d53f
3
  size 3774165
forklift-object-detection.py CHANGED
@@ -18,21 +18,52 @@ _CITATION = """\
18
  publisher = { Roboflow },
19
  year = { 2022 },
20
  month = { mar },
21
- note = { visited on 2023-01-01 },
22
  }
23
  """
24
- _URLS = {
25
- "train": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/train.zip",
26
- "validation": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/valid.zip",
27
- "test": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/test.zip",
28
- }
29
-
30
  _CATEGORIES = ['forklift', 'person']
31
  _ANNOTATION_FILENAME = "_annotations.coco.json"
32
 
33
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
34
  class FORKLIFTOBJECTDETECTION(datasets.GeneratorBasedBuilder):
 
 
35
  VERSION = datasets.Version("1.0.0")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36
 
37
  def _info(self):
38
  features = datasets.Features(
@@ -59,7 +90,7 @@ class FORKLIFTOBJECTDETECTION(datasets.GeneratorBasedBuilder):
59
  )
60
 
61
  def _split_generators(self, dl_manager):
62
- data_files = dl_manager.download_and_extract(_URLS)
63
  return [
64
  datasets.SplitGenerator(
65
  name=datasets.Split.TRAIN,
@@ -92,7 +123,7 @@ class FORKLIFTOBJECTDETECTION(datasets.GeneratorBasedBuilder):
92
 
93
  image_id_to_image = {}
94
  idx = 0
95
-
96
  annotation_filepath = os.path.join(folder_dir, _ANNOTATION_FILENAME)
97
  with open(annotation_filepath, "r") as f:
98
  annotations = json.load(f)
@@ -100,12 +131,12 @@ class FORKLIFTOBJECTDETECTION(datasets.GeneratorBasedBuilder):
100
  image_id_to_annotations = collections.defaultdict(list)
101
  for annot in annotations["annotations"]:
102
  image_id_to_annotations[annot["image_id"]].append(annot)
103
- image_id_to_image = {annot["file_name"]: annot for annot in annotations["images"]}
104
 
105
  for filename in os.listdir(folder_dir):
106
  filepath = os.path.join(folder_dir, filename)
107
- if filename in image_id_to_image:
108
- image = image_id_to_image[filename]
109
  objects = [
110
  process_annot(annot, category_id_to_category) for annot in image_id_to_annotations[image["id"]]
111
  ]
 
18
  publisher = { Roboflow },
19
  year = { 2022 },
20
  month = { mar },
21
+ note = { visited on 2023-01-15 },
22
  }
23
  """
 
 
 
 
 
 
24
  _CATEGORIES = ['forklift', 'person']
25
  _ANNOTATION_FILENAME = "_annotations.coco.json"
26
 
27
 
28
+ class FORKLIFTOBJECTDETECTIONConfig(datasets.BuilderConfig):
29
+ """Builder Config for forklift-object-detection"""
30
+
31
+ def __init__(self, data_urls, **kwargs):
32
+ """
33
+ BuilderConfig for forklift-object-detection.
34
+
35
+ Args:
36
+ data_urls: `dict`, name to url to download the zip file from.
37
+ **kwargs: keyword arguments forwarded to super.
38
+ """
39
+ super(FORKLIFTOBJECTDETECTIONConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
40
+ self.data_urls = data_urls
41
+
42
+
43
  class FORKLIFTOBJECTDETECTION(datasets.GeneratorBasedBuilder):
44
+ """forklift-object-detection object detection dataset"""
45
+
46
  VERSION = datasets.Version("1.0.0")
47
+ BUILDER_CONFIGS = [
48
+ FORKLIFTOBJECTDETECTIONConfig(
49
+ name="full",
50
+ description="Full version of forklift-object-detection dataset.",
51
+ data_urls={
52
+ "train": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/train.zip",
53
+ "validation": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/valid.zip",
54
+ "test": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/test.zip",
55
+ },
56
+ ),
57
+ FORKLIFTOBJECTDETECTIONConfig(
58
+ name="mini",
59
+ description="Mini version of forklift-object-detection dataset.",
60
+ data_urls={
61
+ "train": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/valid-mini.zip",
62
+ "validation": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/valid-mini.zip",
63
+ "test": "https://huggingface.co/datasets/keremberke/forklift-object-detection/resolve/main/data/valid-mini.zip",
64
+ },
65
+ )
66
+ ]
67
 
68
  def _info(self):
69
  features = datasets.Features(
 
90
  )
91
 
92
  def _split_generators(self, dl_manager):
93
+ data_files = dl_manager.download_and_extract(self.config.data_urls)
94
  return [
95
  datasets.SplitGenerator(
96
  name=datasets.Split.TRAIN,
 
123
 
124
  image_id_to_image = {}
125
  idx = 0
126
+
127
  annotation_filepath = os.path.join(folder_dir, _ANNOTATION_FILENAME)
128
  with open(annotation_filepath, "r") as f:
129
  annotations = json.load(f)
 
131
  image_id_to_annotations = collections.defaultdict(list)
132
  for annot in annotations["annotations"]:
133
  image_id_to_annotations[annot["image_id"]].append(annot)
134
+ filename_to_image = {image["file_name"]: image for image in annotations["images"]}
135
 
136
  for filename in os.listdir(folder_dir):
137
  filepath = os.path.join(folder_dir, filename)
138
+ if filename in filename_to_image:
139
+ image = filename_to_image[filename]
140
  objects = [
141
  process_annot(annot, category_id_to_category) for annot in image_id_to_annotations[image["id"]]
142
  ]
split_name_to_num_samples.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"test": 42, "valid": 84, "train": 295}
thumbnail.jpg ADDED

Git LFS Details

  • SHA256: 0298b2e241a1e3a65906ba6ff3c45a64cf56744ad74d06d67e5a92db421b9289
  • Pointer size: 131 Bytes
  • Size of remote file: 159 kB