sayakpaul committed
Commit 7708f03
1 Parent(s): 210e4b9

Delete generate_kandinsky.py

Files changed (1)
  1. generate_kandinsky.py +0 -100
generate_kandinsky.py DELETED
@@ -1,100 +0,0 @@
- import PIL
- import torch
- from datasets import Dataset, Features
- from datasets import Image as ImageFeature
- from datasets import Value, load_dataset
-
- from diffusers import DiffusionPipeline
-
-
- def main():
-     print("Loading dataset...")
-     parti_prompts = load_dataset("nateraw/parti-prompts", split="train")
-
-     print("Loading pipeline...")
-     pipe_prior = DiffusionPipeline.from_pretrained(
-         "kandinsky-community/kandinsky-2-2-prior", torch_dtype=torch.float16
-     )
-     pipe_prior.to("cuda")
-     pipe_prior.set_progress_bar_config(disable=True)
-
-     t2i_pipe = DiffusionPipeline.from_pretrained(
-         "kandinsky-community/kandinsky-2-2-decoder", torch_dtype=torch.float16
-     )
-     t2i_pipe.to("cuda")
-     t2i_pipe.set_progress_bar_config(disable=True)
-
-     seed = 0
-     generator = torch.Generator("cuda").manual_seed(seed)
-     ckpt_id = (
-         "kandinsky-community/" + "kandinsky-2-2-prior" + "_" + "kandinsky-2-2-decoder"
-     )
-
-     print("Running inference...")
-     main_dict = {}
-     for i in range(len(parti_prompts)):
-         sample = parti_prompts[i]
-         prompt = sample["Prompt"]
-
-         image_embeds, negative_image_embeds = pipe_prior(
-             prompt,
-             generator=generator,
-             num_inference_steps=100,
-             guidance_scale=7.5,
-         ).to_tuple()
-         image = t2i_pipe(
-             image_embeds=image_embeds,
-             negative_image_embeds=negative_image_embeds,
-             generator=generator,
-             num_inference_steps=100,
-             guidance_scale=7.5,
-         ).images[0]
-
-         image = image.resize((256, 256), resample=PIL.Image.Resampling.LANCZOS)
-         img_path = f"kandinsky_22_{i}.png"
-         image.save(img_path)
-         main_dict.update(
-             {
-                 prompt: {
-                     "img_path": img_path,
-                     "Category": sample["Category"],
-                     "Challenge": sample["Challenge"],
-                     "Note": sample["Note"],
-                     "model_name": ckpt_id,
-                     "seed": seed,
-                 }
-             }
-         )
-
-     def generation_fn():
-         for prompt in main_dict:
-             prompt_entry = main_dict[prompt]
-             yield {
-                 "Prompt": prompt,
-                 "Category": prompt_entry["Category"],
-                 "Challenge": prompt_entry["Challenge"],
-                 "Note": prompt_entry["Note"],
-                 "images": {"path": prompt_entry["img_path"]},
-                 "model_name": prompt_entry["model_name"],
-                 "seed": prompt_entry["seed"],
-             }
-
-     print("Preparing HF dataset...")
-     ds = Dataset.from_generator(
-         generation_fn,
-         features=Features(
-             Prompt=Value("string"),
-             Category=Value("string"),
-             Challenge=Value("string"),
-             Note=Value("string"),
-             images=ImageFeature(),
-             model_name=Value("string"),
-             seed=Value("int64"),
-         ),
-     )
-     ds_id = "diffusers-parti-prompts/kandinsky-2-2"
-     ds.push_to_hub(ds_id)
-
-
- if __name__ == "__main__":
-     main()