Convert dataset to Parquet

#1
README.md CHANGED
@@ -56,13 +56,20 @@ dataset_info:
           '46': zigzagged
   splits:
   - name: train
-    num_bytes: 448550
+    num_bytes: 463693721.28
     num_examples: 3760
   - name: test
-    num_bytes: 220515
+    num_bytes: 171623828.0
     num_examples: 1880
-  download_size: 625712354
-  dataset_size: 669065
+  download_size: 629499529
+  dataset_size: 635317549.28
+configs:
+- config_name: default
+  data_files:
+  - split: train
+    path: data/train-*
+  - split: test
+    path: data/test-*
 ---

 # [DTD: Describable Textures Dataset](https://www.robots.ox.ac.uk/~vgg/data/dtd/)
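With the Parquet shards and the new `configs` mapping in place, the dataset loads through the Hub's packaged Parquet builder instead of a custom script. A minimal sketch, assuming the 🤗 `datasets` library is installed; "<namespace>/dtd" is a placeholder for this repository's actual id:

from datasets import load_dataset

# "<namespace>/dtd" is a placeholder; substitute the repository's real <namespace>/<name>.
ds = load_dataset("<namespace>/dtd")

print(ds)              # DatasetDict with "train" (3760 rows) and "test" (1880 rows)
print(ds["train"][0])  # {"image": <PIL.Image.Image ...>, "label": <int class index>}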
data/{test.zip → test-00000-of-00001.parquet} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:173d40121e4f4ee90e09e5082ab76e124355297b3b72670aec49463115c953bd
-size 177943979
+oid sha256:3e137d5255ddd649a004f19e6b36f170572e2d6504b047f2f4fdb616180fdce3
+size 179155504
data/{train.zip → train-00000-of-00001.parquet} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:81bc25a565bf38343376b04f55d1fd81c0e03fb5a2005734d3c06d16ef99ade2
-size 447768375
+oid sha256:a5d4146bc2770ff237b8c5eae693cc44ddfd1162db35de37e9a89b02fd6094eb
+size 450344025
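The renamed files are ordinary Parquet shards, so they can also be inspected directly after cloning the repository (with LFS). A small sketch, assuming pandas with a Parquet engine such as pyarrow; in Parquet files written by `datasets`, the image column is expected to hold a struct of encoded bytes plus the original file path:

import pandas as pd

# Read one shard straight from the checked-out LFS file.
df = pd.read_parquet("data/train-00000-of-00001.parquet")

print(len(df))      # 3760 rows
print(df.columns)   # expected: "image" (struct of bytes/path) and "label" (class index)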
dtd.py DELETED
@@ -1,94 +0,0 @@
-import datasets
-from datasets.data_files import DataFilesDict
-from datasets.packaged_modules.imagefolder.imagefolder import ImageFolder, ImageFolderConfig
-
-logger = datasets.logging.get_logger(__name__)
-
-
-class GTSRB(ImageFolder):
-    R"""
-    DTD dataset for image classification.
-    """
-
-    BUILDER_CONFIG_CLASS = ImageFolderConfig
-    BUILDER_CONFIGS = [
-        ImageFolderConfig(
-            name="default",
-            features=("images", "labels"),
-            data_files=DataFilesDict({split: f"data/{split}.zip" for split in ["train", "test"]}),
-        )
-    ]
-
-    classnames = [
-        "banded",
-        "blotchy",
-        "braided",
-        "bubbly",
-        "bumpy",
-        "chequered",
-        "cobwebbed",
-        "cracked",
-        "crosshatched",
-        "crystalline",
-        "dotted",
-        "fibrous",
-        "flecked",
-        "freckled",
-        "frilly",
-        "gauzy",
-        "grid",
-        "grooved",
-        "honeycombed",
-        "interlaced",
-        "knitted",
-        "lacelike",
-        "lined",
-        "marbled",
-        "matted",
-        "meshed",
-        "paisley",
-        "perforated",
-        "pitted",
-        "pleated",
-        "polka-dotted",
-        "porous",
-        "potholed",
-        "scaly",
-        "smeared",
-        "spiralled",
-        "sprinkled",
-        "stained",
-        "stratified",
-        "striped",
-        "studded",
-        "swirly",
-        "veined",
-        "waffled",
-        "woven",
-        "wrinkled",
-        "zigzagged",
-    ]
-
-    clip_templates = [
-        lambda c: f"a photo of a {c} texture.",
-        lambda c: f"a photo of a {c} pattern.",
-        lambda c: f"a photo of a {c} thing.",
-        lambda c: f"a photo of a {c} object.",
-        lambda c: f"a photo of the {c} texture.",
-        lambda c: f"a photo of the {c} pattern.",
-        lambda c: f"a photo of the {c} thing.",
-        lambda c: f"a photo of the {c} object.",
-    ]
-
-    def _info(self):
-        return datasets.DatasetInfo(
-            description="DTD dataset for image classification.",
-            features=datasets.Features(
-                {
-                    "image": datasets.Image(),
-                    "label": datasets.ClassLabel(names=self.classnames),
-                }
-            ),
-            supervised_keys=("image", "label"),
-            task_templates=[datasets.ImageClassification(image_column="image", label_column="label")],
-        )
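The deleted loading script is no longer needed: the split layout now comes from the README `configs` block, and the class names previously hard-coded in `dtd.py` remain available through the dataset's `ClassLabel` feature. A sketch, again using a placeholder repository id:

from datasets import load_dataset

ds = load_dataset("<namespace>/dtd", split="train")  # placeholder repository id

label = ds.features["label"]          # ClassLabel carrying the 47 DTD texture names
print(label.names[:3])                # ['banded', 'blotchy', 'braided']
print(label.int2str(ds[0]["label"]))  # class name of the first example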