awang11 commited on
Commit
67a7a1c
1 Parent(s): cd87250
Files changed (1) hide show
  1. VGGSound_ControlNet.py +115 -0
VGGSound_ControlNet.py ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

import pandas as pd
import datasets
from huggingface_hub import hf_hub_url

# Dataset script version; bump when the hosted files change.
_VERSION = datasets.Version("0.0.2")

_DESCRIPTION = "TODO"
_HOMEPAGE = "TODO"
_LICENSE = "TODO"
_CITATION = "TODO"

# Each example pairs a target image with a conditioning image and a caption.
_FEATURES = datasets.Features(
    {
        "image": datasets.Image(),
        "conditioning_image": datasets.Image(),
        "text": datasets.Value("string"),
    },
)

# Hub dataset repository hosting the metadata files and image archives.
# Factored out so the repo id is written once instead of four times.
_REPO_ID = "awang11/VGGSound_ControlNet"


def _dataset_file_url(filename):
    """Return the resolved Hub URL for *filename* inside the dataset repo."""
    return hf_hub_url(_REPO_ID, filename=filename, repo_type="dataset")


TRAIN_METADATA_URL = _dataset_file_url("train.jsonl")
TEST_METADATA_URL = _dataset_file_url("test.jsonl")
IMAGES_URL = _dataset_file_url("images.zip")
CONDITIONING_IMAGES_URL = _dataset_file_url("conditioning_images.zip")

_DEFAULT_CONFIG = datasets.BuilderConfig(name="default", version=_VERSION)
47
+
48
class Fill50k(datasets.GeneratorBasedBuilder):
    """Builder for VGGSound ControlNet (image, conditioning image, caption) pairs.

    NOTE(review): the class name looks like a leftover from the fill50k
    ControlNet template dataset; it is kept unchanged because external code
    may reference the builder by name — confirm before renaming.
    """

    BUILDER_CONFIGS = [_DEFAULT_CONFIG]
    DEFAULT_CONFIG_NAME = "default"

    def _info(self):
        """Describe the dataset (features, homepage, license, citation)."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=_FEATURES,
            supervised_keys=None,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Download metadata and image archives, then define train/test splits.

        Both splits share the same extracted image directories; only the
        JSON-lines metadata file differs between them.
        """
        train_metadata_path = dl_manager.download(TRAIN_METADATA_URL)
        test_metadata_path = dl_manager.download(TEST_METADATA_URL)
        images_dir = dl_manager.download_and_extract(IMAGES_URL)
        conditioning_images_dir = dl_manager.download_and_extract(
            CONDITIONING_IMAGES_URL
        )

        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "metadata_path": train_metadata_path,
                    "images_dir": images_dir,
                    "conditioning_images_dir": conditioning_images_dir,
                },
            ),
            datasets.SplitGenerator(
                name=datasets.Split.TEST,
                # These kwargs will be passed to _generate_examples
                gen_kwargs={
                    "metadata_path": test_metadata_path,
                    "images_dir": images_dir,
                    "conditioning_images_dir": conditioning_images_dir,
                },
            ),
        ]

    def _generate_examples(self, metadata_path, images_dir, conditioning_images_dir):
        """Yield ``(key, example)`` pairs from a JSON-lines metadata file.

        The key is the row's relative image path, which therefore must be
        unique within a split. Images are loaded as raw bytes and wrapped in
        the ``{"path": ..., "bytes": ...}`` dicts expected by
        ``datasets.Image``.
        """
        metadata = pd.read_json(metadata_path, lines=True)

        for _, row in metadata.iterrows():
            image_path = os.path.join(images_dir, row["image"])
            conditioning_image_path = os.path.join(
                conditioning_images_dir, row["conditioning_image"]
            )

            # Use context managers so file handles are closed promptly;
            # the original `open(...).read()` left closing to the GC.
            with open(image_path, "rb") as f:
                image_bytes = f.read()
            with open(conditioning_image_path, "rb") as f:
                conditioning_image_bytes = f.read()

            yield row["image"], {
                "text": row["text"],
                "image": {
                    "path": image_path,
                    "bytes": image_bytes,
                },
                "conditioning_image": {
                    "path": conditioning_image_path,
                    "bytes": conditioning_image_bytes,
                },
            }