ThiennNguyen committed
Commit
133d294
1 Parent(s): f48163f

Upload MangaColoring.py

Files changed (1)
  1. MangaColoring.py +89 -0
MangaColoring.py ADDED
@@ -0,0 +1,89 @@
+ import os
+
+ import datasets
+ import pandas as pd
+
+ _VERSION = datasets.Version("1.0.0")
+
+ _DESCRIPTION = "TODO"
+ _HOMEPAGE = "TODO"
+ _LICENSE = "TODO"
+ _CITATION = "TODO"
+
+ _FEATURES = datasets.Features(
+     {
+         "image": datasets.Image(),
+         "conditioning_image": datasets.Image(),
+         "text": datasets.Value("string"),
+     },
+ )
+
+ # Google Drive share links for the image archives and the metadata file.
+ _URLS = {
+     "conditioning_images": "https://drive.google.com/file/d/1c8fzUvfzV3ZeyZDk6PYJSKM4tMn7KaET/view?usp=sharing",
+     "images": "https://drive.google.com/file/d/1pIonsEGel0-LfrpVcWmobgzqCkMTzTnx/view?usp=drive_link",
+     "text": "https://drive.google.com/file/d/1cqthsUTPJp8VpygfA-3Sk421kvTFtqXC/view?usp=drive_link",
+ }
+
+ _DEFAULT_CONFIG = datasets.BuilderConfig(name="default", version=_VERSION)
+
+
+ class MangaColoring(datasets.GeneratorBasedBuilder):
+     BUILDER_CONFIGS = [_DEFAULT_CONFIG]
+     DEFAULT_CONFIG_NAME = "default"
+
+     def _info(self):
+         return datasets.DatasetInfo(
+             description=_DESCRIPTION,
+             features=_FEATURES,
+             supervised_keys=None,
+             homepage=_HOMEPAGE,
+             license=_LICENSE,
+             citation=_CITATION,
+         )
+
+     def _split_generators(self, dl_manager):
+         # Download the JSON-lines metadata file and extract the two image archives.
+         metadata_path = dl_manager.download(_URLS["text"])
+         images_dir = dl_manager.download_and_extract(_URLS["images"])
+         conditioning_images_dir = dl_manager.download_and_extract(
+             _URLS["conditioning_images"]
+         )
+
+         return [
+             datasets.SplitGenerator(
+                 name=datasets.Split.TRAIN,
+                 # These kwargs will be passed to _generate_examples
+                 gen_kwargs={
+                     "metadata_path": metadata_path,
+                     "images_dir": images_dir,
+                     "conditioning_images_dir": conditioning_images_dir,
+                 },
+             ),
+         ]
+
+     def _generate_examples(self, metadata_path, images_dir, conditioning_images_dir):
+         metadata = pd.read_json(metadata_path, lines=True)
+
+         for _, row in metadata.iterrows():
+             text = row["text"]
+
+             image_path = os.path.join(images_dir, row["image"])
+             with open(image_path, "rb") as f:
+                 image = f.read()
+
+             conditioning_image_path = os.path.join(
+                 conditioning_images_dir, row["conditioning_image"]
+             )
+             with open(conditioning_image_path, "rb") as f:
+                 conditioning_image = f.read()
+
+             # Key each example by the relative image path from the metadata file.
+             yield row["image"], {
+                 "text": text,
+                 "image": {
+                     "path": image_path,
+                     "bytes": image,
+                 },
+                 "conditioning_image": {
+                     "path": conditioning_image_path,
+                     "bytes": conditioning_image,
+                 },
+             }
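
The script reads the downloaded `text` file as JSON lines, one record per example with `image`, `conditioning_image`, and `text` fields, and pairs each colored image with its conditioning image. Below is a minimal usage sketch; the repository id `ThiennNguyen/MangaColoring` is an assumption for illustration and should be replaced with the actual dataset repo hosting this script.

```python
from datasets import load_dataset

# Hypothetical repo id; recent `datasets` releases may also require
# trust_remote_code=True to run a script-based dataset like this one.
dataset = load_dataset("ThiennNguyen/MangaColoring", split="train")

example = dataset[0]
print(example["text"])                 # caption / prompt string
example["image"].show()                # colored target image (decoded to PIL.Image)
example["conditioning_image"].show()   # conditioning image (decoded to PIL.Image)
```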