keremberke committed
Commit 62d190d
1 Parent(s): 72efdf9

dataset uploaded by roboflow2huggingface package

README.md CHANGED

@@ -3,10 +3,14 @@ task_categories:
 - object-detection
 tags:
 - roboflow
+- roboflow2huggingface
+- Self Driving
+- Anpr
 ---
 
-### Roboflow Dataset Page
-[https://universe.roboflow.com/augmented-startups/vehicle-registration-plates-trudk/dataset/1](https://universe.roboflow.com/augmented-startups/vehicle-registration-plates-trudk/dataset/1?ref=roboflow2huggingface)
+<div align="center">
+  <img width="640" alt="keremberke/license-plate-object-detection" src="https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/thumbnail.jpg">
+</div>
 
 ### Dataset Labels
 
@@ -14,6 +18,34 @@ tags:
 ['license_plate']
 ```
 
+
+### Number of Images
+
+```json
+{'train': 6176, 'valid': 1765, 'test': 882}
+```
+
+
+### How to Use
+
+- Install [datasets](https://pypi.org/project/datasets/):
+
+```bash
+pip install datasets
+```
+
+- Load the dataset:
+
+```python
+from datasets import load_dataset
+
+ds = load_dataset("keremberke/license-plate-object-detection", name="full")
+example = ds['train'][0]
+```
+
+### Roboflow Dataset Page
+[https://universe.roboflow.com/augmented-startups/vehicle-registration-plates-trudk/dataset/1](https://universe.roboflow.com/augmented-startups/vehicle-registration-plates-trudk/dataset/1?ref=roboflow2huggingface)
+
 ### Citation
 
 ```
@@ -27,7 +59,7 @@ tags:
     publisher = { Roboflow },
     year = { 2022 },
     month = { jun },
-    note = { visited on 2023-01-01 },
+    note = { visited on 2023-01-18 },
 }
 ```
 
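Besides the `full` config shown in the README, this commit also introduces a `mini` config backed by the new `data/valid-mini.zip`. A minimal sketch of loading it and inspecting the schema; the exact feature names live in the builder script rather than this README, so treat the `objects` field access below as an assumption to verify against `ds["train"].features`:

```python
from datasets import load_dataset

# "mini" points train/validation/test at the small valid-mini.zip archive,
# so it downloads quickly and is handy for smoke tests.
ds = load_dataset("keremberke/license-plate-object-detection", name="mini")

print(ds)                    # split names and sizes
print(ds["train"].features)  # confirm the actual schema here

example = ds["train"][0]
# Assumed COCO-style fields ("image", "objects"); verify against the features above.
print(example.get("objects"))
```
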
data/test.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0da8f2a4a703d126b4c39416940aa93f1fc7c9c4c75d7d86a2c5e245f8918258
+oid sha256:8fbc65b333765e9883ba0a86a767c87875c6e1a420ff9eb6c411ed9e58155026
 size 21917681
data/train.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1275562c0716aee2e9f377c86f74fe76a80a9057a30d1d518b88cfbc3f0785e8
+oid sha256:d44bf42f2a46e3df0d88f98311d6bca0eaadbaa0958aa41b629e1c101baddb14
 size 163307791
data/valid-mini.zip ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2e94ce5ab72763c8dbb1e15e3134270960b80d0afe68eec735d601ebdce1f9aa
+size 72330
data/valid.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ed8ccebeb8c09fbc19836eaf88c45105fddf27e4909c0e8ed79cabcbd85742d4
+oid sha256:2ec44d2f6d418a5ad30783bccd2d1883b45aca3baa02843ae5d2421796fe0c01
 size 44930828
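The `data/*.zip` entries above are Git LFS pointer files, so each `oid sha256:` value is simply the SHA-256 digest of the corresponding archive. A minimal sketch for checking a downloaded copy against the pointer (the local path is a placeholder):

```python
import hashlib

def file_sha256(path: str, chunk_size: int = 1 << 20) -> str:
    """Compute the hex SHA-256 digest of a file, reading it in chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Placeholder path to a locally downloaded copy of data/valid-mini.zip.
local_copy = "valid-mini.zip"
expected = "2e94ce5ab72763c8dbb1e15e3134270960b80d0afe68eec735d601ebdce1f9aa"  # from the pointer above
print(file_sha256(local_copy) == expected)
```
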
license-plate-object-detection.py CHANGED

@@ -5,7 +5,7 @@ import os
 import datasets
 
 
-_HOMEPAGE = "https://universe.roboflow.com/augmented-startups/vehicle-registration-plates-trudk/dataset/1"
+_HOMEPAGE = "https://universe.roboflow.com/augmented-startups/vehicle-registration-plates-trudk/dataset/1?ref=roboflow2huggingface"
 _LICENSE = "CC BY 4.0"
 _CITATION = """\
 @misc{ vehicle-registration-plates-trudk_dataset,
@@ -18,21 +18,52 @@ _CITATION = """\
     publisher = { Roboflow },
     year = { 2022 },
     month = { jun },
-    note = { visited on 2023-01-01 },
+    note = { visited on 2023-01-18 },
 }
 """
-_URLS = {
-    "train": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/train.zip",
-    "validation": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/valid.zip",
-    "test": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/test.zip",
-}
-
 _CATEGORIES = ['license_plate']
 _ANNOTATION_FILENAME = "_annotations.coco.json"
 
 
+class LICENSEPLATEOBJECTDETECTIONConfig(datasets.BuilderConfig):
+    """Builder Config for license-plate-object-detection"""
+
+    def __init__(self, data_urls, **kwargs):
+        """
+        BuilderConfig for license-plate-object-detection.
+
+        Args:
+            data_urls: `dict`, name to url to download the zip file from.
+            **kwargs: keyword arguments forwarded to super.
+        """
+        super(LICENSEPLATEOBJECTDETECTIONConfig, self).__init__(version=datasets.Version("1.0.0"), **kwargs)
+        self.data_urls = data_urls
+
+
 class LICENSEPLATEOBJECTDETECTION(datasets.GeneratorBasedBuilder):
+    """license-plate-object-detection object detection dataset"""
+
     VERSION = datasets.Version("1.0.0")
+    BUILDER_CONFIGS = [
+        LICENSEPLATEOBJECTDETECTIONConfig(
+            name="full",
+            description="Full version of license-plate-object-detection dataset.",
+            data_urls={
+                "train": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/train.zip",
+                "validation": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/valid.zip",
+                "test": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/test.zip",
+            },
+        ),
+        LICENSEPLATEOBJECTDETECTIONConfig(
+            name="mini",
+            description="Mini version of license-plate-object-detection dataset.",
+            data_urls={
+                "train": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/valid-mini.zip",
+                "validation": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/valid-mini.zip",
+                "test": "https://huggingface.co/datasets/keremberke/license-plate-object-detection/resolve/main/data/valid-mini.zip",
+            },
+        )
+    ]
 
     def _info(self):
         features = datasets.Features(
@@ -59,7 +90,7 @@ class LICENSEPLATEOBJECTDETECTION(datasets.GeneratorBasedBuilder):
         )
 
     def _split_generators(self, dl_manager):
-        data_files = dl_manager.download_and_extract(_URLS)
+        data_files = dl_manager.download_and_extract(self.config.data_urls)
         return [
             datasets.SplitGenerator(
                 name=datasets.Split.TRAIN,
@@ -92,7 +123,7 @@ class LICENSEPLATEOBJECTDETECTION(datasets.GeneratorBasedBuilder):
 
         image_id_to_image = {}
         idx = 0
-
+
         annotation_filepath = os.path.join(folder_dir, _ANNOTATION_FILENAME)
         with open(annotation_filepath, "r") as f:
             annotations = json.load(f)
@@ -100,12 +131,12 @@ class LICENSEPLATEOBJECTDETECTION(datasets.GeneratorBasedBuilder):
         image_id_to_annotations = collections.defaultdict(list)
         for annot in annotations["annotations"]:
             image_id_to_annotations[annot["image_id"]].append(annot)
-        image_id_to_image = {annot["file_name"]: annot for annot in annotations["images"]}
+        filename_to_image = {image["file_name"]: image for image in annotations["images"]}
 
         for filename in os.listdir(folder_dir):
             filepath = os.path.join(folder_dir, filename)
-            if filename in image_id_to_image:
-                image = image_id_to_image[filename]
+            if filename in filename_to_image:
+                image = filename_to_image[filename]
                 objects = [
                     process_annot(annot, category_id_to_category) for annot in image_id_to_annotations[image["id"]]
                 ]
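Two changes in the builder are worth noting: `_split_generators` now reads download URLs from `self.config.data_urls`, so the `full` and `mini` configs resolve to different archives, and the file-name lookup in `_generate_examples` is renamed to `filename_to_image` (the old `image_id_to_image` was keyed by file name despite its name, with a misleading `annot` loop variable). A minimal, self-contained sketch of that COCO-style join, using a tiny invented payload that mimics only the fields the builder reads:

```python
import collections

# Invented minimal COCO-style payload, for illustration only.
annotations = {
    "images": [{"id": 0, "file_name": "car_001.jpg", "width": 640, "height": 480}],
    "annotations": [{"image_id": 0, "category_id": 0, "bbox": [100.0, 200.0, 50.0, 20.0], "area": 1000.0}],
}

# Group annotation records by the image they belong to.
image_id_to_annotations = collections.defaultdict(list)
for annot in annotations["annotations"]:
    image_id_to_annotations[annot["image_id"]].append(annot)

# The renamed lookup: file name -> image record, built from annotations["images"].
filename_to_image = {image["file_name"]: image for image in annotations["images"]}

# Joining the two, as _generate_examples does when it walks the extracted folder.
image = filename_to_image["car_001.jpg"]
print(image_id_to_annotations[image["id"]])  # -> the bbox records for that file
```
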
split_name_to_num_samples.json ADDED
@@ -0,0 +1 @@
+{"train": 6176, "valid": 1765, "test": 882}
thumbnail.jpg ADDED

Git LFS Details

  • SHA256: 356f7ebb5f3c5a9041157f938ee061a499146eee0bfbe5390041e73217424223
  • Pointer size: 131 Bytes
  • Size of remote file: 130 kB