coxmetin committed
Commit
d704325
1 Parent(s): 3576efc

Your commit message

Files changed (2)
  1. app.py +205 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,205 @@
+ #!/usr/bin/env python
+
+ import os
+ import random
+ import uuid
+
+ import gradio as gr
+ import numpy as np
+ from PIL import Image
+ import spaces
+ import torch
+ from diffusers import StableDiffusionXLPipeline, EulerAncestralDiscreteScheduler
+
+ DESCRIPTION = """
+ # DALL•E 3 XL v2
+ """
+
+ def save_image(img):
+     unique_name = str(uuid.uuid4()) + ".png"
+     img.save(unique_name)
+     return unique_name
+
+ def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
+     if randomize_seed:
+         seed = random.randint(0, MAX_SEED)
+     return seed
+
+ MAX_SEED = np.iinfo(np.int32).max
+
+ if not torch.cuda.is_available():
+     DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
+
+ MAX_SEED = np.iinfo(np.int32).max
+
+ USE_TORCH_COMPILE = 0
+ ENABLE_CPU_OFFLOAD = 0
+
+
+ if torch.cuda.is_available():
+     pipe = StableDiffusionXLPipeline.from_pretrained(
+         "fluently/Fluently-XL-v2",
+         torch_dtype=torch.float16,
+         use_safetensors=True,
+     )
+     pipe.scheduler = EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config)
+
+
+     pipe.load_lora_weights("ehristoforu/dalle-3-xl-v2", weight_name="dalle-3-xl-lora-v2.safetensors", adapter_name="dalle")
+     pipe.set_adapters("dalle")
+
+     pipe.to("cuda")
+
+
+
+ @spaces.GPU(enable_queue=True)
+ def generate(
+     prompt: str,
+     negative_prompt: str = "",
+     use_negative_prompt: bool = False,
+     seed: int = 0,
+     width: int = 1024,
+     height: int = 1024,
+     guidance_scale: float = 3,
+     randomize_seed: bool = False,
+     progress=gr.Progress(track_tqdm=True),
+ ):
+
+
+     seed = int(randomize_seed_fn(seed, randomize_seed))
+
+     if not use_negative_prompt:
+         negative_prompt = ""  # type: ignore
+
+     images = pipe(
+         prompt=prompt,
+         negative_prompt=negative_prompt,
+         width=width,
+         height=height,
+         guidance_scale=guidance_scale,
+         num_inference_steps=25,
+         num_images_per_prompt=1,
+         cross_attention_kwargs={"scale": 0.65},
+         output_type="pil",
+     ).images
+     image_paths = [save_image(img) for img in images]
+     print(image_paths)
+     return image_paths, seed
+
+ examples = [
+     "neon holography crystal cat",
+     "a cat eating a piece of cheese",
+     "an astronaut riding a horse in space",
+     "a cartoon of a boy playing with a tiger",
+     "a cute robot artist painting on an easel, concept art",
+     "a close up of a woman wearing a transparent, prismatic, elaborate nemeses headdress, over the should pose, brown skin-tone"
+ ]
+
+ css = '''
+ .gradio-container{max-width: 560px !important}
+ h1{text-align:center}
+ footer {
+     visibility: hidden
+ }
+ '''
+ with gr.Blocks(css=css, theme="pseudolab/huggingface-korea-theme") as demo:
+     gr.Markdown(DESCRIPTION)
+     gr.DuplicateButton(
+         value="Duplicate Space for private use",
+         elem_id="duplicate-button",
+         visible=False,
+     )
+
+     with gr.Group():
+         with gr.Row():
+             prompt = gr.Text(
+                 label="Prompt",
+                 show_label=False,
+                 max_lines=1,
+                 placeholder="Enter your prompt",
+                 container=False,
+             )
+             run_button = gr.Button("Run", scale=0)
+         result = gr.Gallery(label="Result", columns=1, preview=True, show_label=False)
+     with gr.Accordion("Advanced options", open=False):
+         use_negative_prompt = gr.Checkbox(label="Use negative prompt", value=True)
+         negative_prompt = gr.Text(
+             label="Negative prompt",
+             lines=4,
+             max_lines=6,
+             value="""(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation, (NSFW:1.25)""",
+             placeholder="Enter a negative prompt",
+             visible=True,
+         )
+         seed = gr.Slider(
+             label="Seed",
+             minimum=0,
+             maximum=MAX_SEED,
+             step=1,
+             value=0,
+             visible=True
+         )
+         randomize_seed = gr.Checkbox(label="Randomize seed", value=True)
+         with gr.Row(visible=True):
+             width = gr.Slider(
+                 label="Width",
+                 minimum=512,
+                 maximum=2048,
+                 step=8,
+                 value=1024,
+             )
+             height = gr.Slider(
+                 label="Height",
+                 minimum=512,
+                 maximum=2048,
+                 step=8,
+                 value=1024,
+             )
+         with gr.Row():
+             guidance_scale = gr.Slider(
+                 label="Guidance Scale",
+                 minimum=0.1,
+                 maximum=20.0,
+                 step=0.1,
+                 value=6,
+             )
+
+     gr.Examples(
+         examples=examples,
+         inputs=prompt,
+         outputs=[result, seed],
+         fn=generate,
+         cache_examples=False,
+     )
+
+     use_negative_prompt.change(
+         fn=lambda x: gr.update(visible=x),
+         inputs=use_negative_prompt,
+         outputs=negative_prompt,
+         api_name=False,
+     )
+
+
+     gr.on(
+         triggers=[
+             prompt.submit,
+             negative_prompt.submit,
+             run_button.click,
+         ],
+         fn=generate,
+         inputs=[
+             prompt,
+             negative_prompt,
+             use_negative_prompt,
+             seed,
+             width,
+             height,
+             guidance_scale,
+             randomize_seed,
+         ],
+         outputs=[result, seed],
+         api_name="run",
+     )
+
+ if __name__ == "__main__":
+     demo.queue(max_size=20).launch(show_api=False, debug=False)
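
For anyone who wants to smoke-test the change outside the Gradio UI, here is a minimal, hypothetical sketch. It is not part of this commit: it assumes the file imports as `app`, that a CUDA GPU is available (the pipeline `pipe` is only built when `torch.cuda.is_available()` is true, so on CPU the call raises NameError), and that the `spaces.GPU` decorator passes the call through when not running on a ZeroGPU Space.

# Hypothetical local smoke test (not part of this commit).
# Assumes a CUDA GPU; note that importing `app` downloads the SDXL weights,
# applies the DALL-E 3 XL LoRA, and builds the Blocks UI at import time.
import app

image_paths, used_seed = app.generate(
    prompt="an astronaut riding a horse in space",  # one of the bundled examples
    use_negative_prompt=False,
    randomize_seed=True,
)

print(used_seed)    # the seed actually used after randomization
print(image_paths)  # a list with one saved PNG path, e.g. ["<uuid>.png"]
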
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ torch
+ diffusers
+ transformers
+ gradio
+ peft
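
requirements.txt pins no versions and omits `spaces`, which app.py imports; the Hugging Face Spaces runtime normally provides that package, so a local run would need it installed separately. A small, hypothetical sanity check (not part of this commit) that the expected modules resolve:

# Hypothetical environment check (not part of this commit): verify that the
# packages named in requirements.txt, plus `spaces`, can be imported locally.
import importlib

for module_name in ("torch", "diffusers", "transformers", "gradio", "peft", "spaces"):
    try:
        importlib.import_module(module_name)
        print(f"{module_name}: ok")
    except ImportError as exc:
        print(f"{module_name}: missing ({exc})")
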