Spaces:
Build error
Build error
Upload 3 files
Browse files- app.py +120 -0
- requirements.txt +23 -0
- style.css +213 -0
app.py
ADDED
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import base64
from io import BytesIO

import gradio as gr
import PIL.Image
import torch

from diffusers import StableDiffusionPipeline, AutoencoderKL, AutoencoderTiny

# CPU-only Space: float32 is used because fp16 ops are poorly supported on CPU
# and reportedly give slightly worse pictures.
device = "cpu"  # Linux & Windows
weight_type = torch.float32  # torch.float16 works as well, but pictures seem to be a bit worse

# One-step distilled SDXS-512 pipeline (runs with a single inference step, no CFG).
pipe = StableDiffusionPipeline.from_pretrained("IDKiro/sdxs-512-dreamshaper", torch_dtype=weight_type)
pipe.to(torch_device=device, torch_dtype=weight_type)

# Two interchangeable image decoders: a tiny VAE (low memory) and the full KL VAE (quality).
vae_tiny = AutoencoderTiny.from_pretrained("IDKiro/sdxs-512-dreamshaper", subfolder="vae")
vae_tiny.to(device, dtype=weight_type)

vae_large = AutoencoderKL.from_pretrained("IDKiro/sdxs-512-dreamshaper", subfolder="vae_large")
# BUG FIX: the original called vae_tiny.to(...) a second time here, leaving
# vae_large on its default device/dtype; move the large VAE instead.
vae_large.to(device, dtype=weight_type)
def pil_image_to_data_url(img, format="PNG"):
    """Encode *img* in-memory in *format* and return it as a base64 data URL."""
    with BytesIO() as buffer:
        img.save(buffer, format=format)
        raw = buffer.getvalue()
    payload = base64.b64encode(raw).decode()
    mime = format.lower()
    return f"data:image/{mime};base64,{payload}"
def run(
    prompt: str,
    device_type: str = "GPU",
    vae_type=None,
    param_dtype: str = 'torch.float16',
) -> tuple:
    """Generate one 512x512 image from *prompt* with the one-step SDXS pipeline.

    Parameters
    ----------
    prompt : str
        Text prompt for generation.
    device_type : str
        "GPU" or "CPU"; on "CPU" the dtype is forced to float32.
    vae_type : str | None
        "tiny vae" or "large vae" selects the decoder; None keeps the current one.
    param_dtype : str
        'torch.float16' or 'torch.float32' (string values matching the UI radio).

    Returns
    -------
    tuple
        (PIL.Image.Image, str): the generated image and a base64 data URL of it.
        BUG FIX: the original annotation said -> PIL.Image.Image, but a 2-tuple
        is returned (and consumed as two outputs by the Gradio wiring).
    """
    # Attach the requested decoder before running.
    if vae_type == "tiny vae":
        pipe.vae = vae_tiny
    elif vae_type == "large vae":
        pipe.vae = vae_large

    if device_type == "CPU":
        device = "cpu"
        param_dtype = 'torch.float32'  # fp16 is slow/unsupported for many CPU ops
    else:
        device = "cuda"

    pipe.to(torch_device=device, torch_dtype=torch.float16 if param_dtype == 'torch.float16' else torch.float32)

    # Distilled one-step model: a single inference step, guidance disabled.
    result = pipe(
        prompt=prompt,
        guidance_scale=0.0,
        num_inference_steps=1,
        output_type="pil",
    ).images[0]

    result_url = pil_image_to_data_url(result)

    return (result, result_url)
examples = [
    "A photo of beautiful mountain with realistic sunset and blue lake, highly detailed, masterpiece",
]

# Gradio UI: prompt box + Run button on the left, device/VAE/dtype radios below,
# and the 512x512 output image on the right. CSS from style.css hides
# #download_output (display: none).
with gr.Blocks(css="style.css") as demo:
    gr.Markdown("# SDXS-512-DreamShaper (only CPU now)")
    with gr.Group():
        with gr.Row():
            with gr.Column(min_width=685):
                with gr.Row():
                    prompt = gr.Text(
                        label="Prompt",
                        show_label=False,
                        max_lines=1,
                        placeholder="Enter your prompt",
                        container=False,
                    )
                    run_button = gr.Button("Run", scale=0)

                device_choices = ['GPU','CPU']
                device_type = gr.Radio(device_choices, label='Device',
                                       value=device_choices[0],
                                       interactive=True,
                                       info='Only CPU now.')

                vae_choices = ['tiny vae','large vae']
                vae_type = gr.Radio(vae_choices, label='Image Decoder Type',
                                    value=vae_choices[0],
                                    interactive=True,
                                    info='To save GPU memory, use tiny vae. For better quality, use large vae.')

                dtype_choices = ['torch.float16','torch.float32']
                param_dtype = gr.Radio(dtype_choices,label='torch.weight_type',
                                       value=dtype_choices[0],
                                       interactive=True,
                                       info='To save GPU memory, use torch.float16. For better quality, use torch.float32.')

                # Hidden by style.css; presumably meant to be triggered from JS
                # to download the data-URL output — TODO confirm it is wired up.
                download_output = gr.Button("Download output", elem_id="download_output")

            with gr.Column(min_width=512):
                result = gr.Image(label="Result", height=512, width=512, elem_id="output_image", show_label=False, show_download_button=True)

    # Examples only feed the prompt; run()'s other parameters use their defaults.
    gr.Examples(
        examples=examples,
        inputs=prompt,
        outputs=result,
        fn=run
    )

    # NOTE(review): no-op load hook (fn=None, no inputs/outputs) — possibly a
    # placeholder for page-load JS; confirm before removing.
    demo.load(None,None,None)

    # run() returns (image, data_url); the second output lands on the hidden
    # download button.
    inputs = [prompt, device_type, vae_type, param_dtype]
    outputs = [result, download_output]
    prompt.submit(fn=run, inputs=inputs, outputs=outputs)
    run_button.click(fn=run, inputs=inputs, outputs=outputs)

if __name__ == "__main__":
    demo.queue().launch(debug=True)
|
requirements.txt
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
einops>=0.6.1
|
2 |
+
numpy>=1.24.4
|
3 |
+
opencv-python==4.6.0.66
|
4 |
+
pillow>=9.5.0
|
5 |
+
scipy==1.11.1
|
6 |
+
timm>=0.9.2
|
7 |
+
tokenizers
|
8 |
+
torch>=2.0.1
|
9 |
+
torchaudio>=2.0.2
|
10 |
+
torchdata==0.6.1
|
11 |
+
torchvision>=0.15.2
|
12 |
+
tqdm>=4.65.0
|
13 |
+
transformers
|
14 |
+
triton==2.0.0
|
15 |
+
urllib3<1.27,>=1.25.4
|
16 |
+
xformers>=0.0.20
|
17 |
+
accelerate
|
18 |
+
streamlit-keyup==0.2.0
|
19 |
+
peft
|
20 |
+
dominate
|
21 |
+
diffusers==0.25.1
|
22 |
+
gradio==3.43.1
|
23 |
+
hf_transfer
|
style.css
ADDED
@@ -0,0 +1,213 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
@import url('https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.1/css/all.min.css');
|
2 |
+
|
3 |
+
/* the outermost container of the app */
|
4 |
+
.main{
|
5 |
+
display: flex;
|
6 |
+
justify-content: center;
|
7 |
+
align-items: center;
|
8 |
+
width: 1200px;
|
9 |
+
}
|
10 |
+
|
11 |
+
/* #main_row{
|
12 |
+
|
13 |
+
} */
|
14 |
+
|
15 |
+
/* hide this class */
|
16 |
+
.svelte-p4aq0j {
|
17 |
+
display: none;
|
18 |
+
}
|
19 |
+
|
20 |
+
.wrap.svelte-p4aq0j.svelte-p4aq0j {
|
21 |
+
display: none;
|
22 |
+
}
|
23 |
+
|
24 |
+
#download_sketch{
|
25 |
+
display: none;
|
26 |
+
}
|
27 |
+
|
28 |
+
#download_output{
|
29 |
+
display: none;
|
30 |
+
}
|
31 |
+
|
32 |
+
#column_input, #column_output{
|
33 |
+
width: 500px;
|
34 |
+
display: flex;
|
35 |
+
/* justify-content: center; */
|
36 |
+
align-items: center;
|
37 |
+
}
|
38 |
+
|
39 |
+
#tools_header, #input_header, #output_header, #process_header {
|
40 |
+
display: flex;
|
41 |
+
justify-content: center;
|
42 |
+
align-items: center;
|
43 |
+
width: 400px;
|
44 |
+
}
|
45 |
+
|
46 |
+
|
47 |
+
#nn{
|
48 |
+
width: 100px;
|
49 |
+
height: 100px;
|
50 |
+
}
|
51 |
+
|
52 |
+
|
53 |
+
#column_process{
|
54 |
+
display: flex;
|
55 |
+
justify-content: center; /* Center horizontally */
|
56 |
+
align-items: center; /* Center vertically */
|
57 |
+
height: 600px;
|
58 |
+
}
|
59 |
+
|
60 |
+
/* this is the "pix2pix-turbo" above the process button */
|
61 |
+
#description > span{
|
62 |
+
display: flex;
|
63 |
+
justify-content: center; /* Center horizontally */
|
64 |
+
align-items: center; /* Center vertically */
|
65 |
+
}
|
66 |
+
|
67 |
+
/* this is the "UNDO_BUTTON, X_BUTTON" */
|
68 |
+
div.svelte-1030q2h{
|
69 |
+
width: 30px;
|
70 |
+
height: 30px;
|
71 |
+
display: none;
|
72 |
+
}
|
73 |
+
|
74 |
+
|
75 |
+
#component-5 > div{
|
76 |
+
border: 0px;
|
77 |
+
box-shadow: none;
|
78 |
+
}
|
79 |
+
|
80 |
+
#cb-eraser, #cb-line{
|
81 |
+
display: none;
|
82 |
+
}
|
83 |
+
|
84 |
+
/* eraser text */
|
85 |
+
#cb-eraser > label > span{
|
86 |
+
display: none;
|
87 |
+
}
|
88 |
+
#cb-line > label > span{
|
89 |
+
display: none;
|
90 |
+
}
|
91 |
+
|
92 |
+
|
93 |
+
.button-row {
|
94 |
+
display: flex;
|
95 |
+
justify-content: center;
|
96 |
+
align-items: center;
|
97 |
+
height: 50px;
|
98 |
+
border: 0px;
|
99 |
+
}
|
100 |
+
|
101 |
+
#my-toggle-pencil{
|
102 |
+
background-image: url("https://icons.getbootstrap.com/assets/icons/pencil.svg");
|
103 |
+
background-color: white;
|
104 |
+
background-size: cover;
|
105 |
+
margin: 0px;
|
106 |
+
box-shadow: none;
|
107 |
+
width: 40px;
|
108 |
+
height: 40px;
|
109 |
+
}
|
110 |
+
|
111 |
+
#my-toggle-pencil.clicked{
|
112 |
+
background-image: url("https://icons.getbootstrap.com/assets/icons/pencil-fill.svg");
|
113 |
+
transform: scale(0.98);
|
114 |
+
background-color: gray;
|
115 |
+
background-size: cover;
|
116 |
+
/* background-size: 95%;
|
117 |
+
background-position: center; */
|
118 |
+
/* border: 2px solid #000; */
|
119 |
+
margin: 0px;
|
120 |
+
box-shadow: none;
|
121 |
+
width: 40px;
|
122 |
+
height: 40px;
|
123 |
+
}
|
124 |
+
|
125 |
+
|
/* Eraser tool toggle button (outline icon when idle). */
#my-toggle-eraser{
    background-image: url("https://icons.getbootstrap.com/assets/icons/eraser.svg");
    background-color: white; /* duplicate declaration removed */
    background-size: cover;
    margin: 0px;
    box-shadow: none;
    width: 40px;
    height: 40px;
}
|
136 |
+
|
137 |
+
#my-toggle-eraser.clicked{
|
138 |
+
background-image: url("https://icons.getbootstrap.com/assets/icons/eraser-fill.svg");
|
139 |
+
transform: scale(0.98);
|
140 |
+
background-color: gray;
|
141 |
+
background-size: cover;
|
142 |
+
margin: 0px;
|
143 |
+
box-shadow: none;
|
144 |
+
width: 40px;
|
145 |
+
height: 40px;
|
146 |
+
}
|
147 |
+
|
148 |
+
|
149 |
+
|
150 |
+
#my-button-undo{
|
151 |
+
background-image: url("https://icons.getbootstrap.com/assets/icons/arrow-counterclockwise.svg");
|
152 |
+
background-color: white;
|
153 |
+
background-size: cover;
|
154 |
+
margin: 0px;
|
155 |
+
box-shadow: none;
|
156 |
+
width: 40px;
|
157 |
+
height: 40px;
|
158 |
+
}
|
159 |
+
|
160 |
+
#my-button-clear{
|
161 |
+
background-image: url("https://icons.getbootstrap.com/assets/icons/x-lg.svg");
|
162 |
+
background-color: white;
|
163 |
+
background-size: cover;
|
164 |
+
margin: 0px;
|
165 |
+
box-shadow: none;
|
166 |
+
width: 40px;
|
167 |
+
height: 40px;
|
168 |
+
|
169 |
+
}
|
170 |
+
|
171 |
+
|
172 |
+
#my-button-down{
|
173 |
+
background-image: url("https://icons.getbootstrap.com/assets/icons/arrow-down.svg");
|
174 |
+
background-color: white;
|
175 |
+
background-size: cover;
|
176 |
+
margin: 0px;
|
177 |
+
box-shadow: none;
|
178 |
+
width: 40px;
|
179 |
+
height: 40px;
|
180 |
+
|
181 |
+
}
|
182 |
+
|
183 |
+
.pad2{
|
184 |
+
padding: 2px;
|
185 |
+
background-color: white;
|
186 |
+
border: 2px solid #000;
|
187 |
+
margin: 10px;
|
188 |
+
display: flex;
|
189 |
+
justify-content: center; /* Center horizontally */
|
190 |
+
align-items: center; /* Center vertically */
|
191 |
+
}
|
192 |
+
|
193 |
+
|
194 |
+
|
195 |
+
|
196 |
+
#output_image, #input_image{
|
197 |
+
border-radius: 0px;
|
198 |
+
border: 5px solid #000;
|
199 |
+
border-width: none;
|
200 |
+
}
|
201 |
+
|
202 |
+
|
203 |
+
#output_image > img{
|
204 |
+
border: 5px solid #000;
|
205 |
+
border-radius: 0px;
|
206 |
+
border-width: none;
|
207 |
+
}
|
208 |
+
|
209 |
+
#input_image > div.image-container.svelte-p3y7hu > div.wrap.svelte-yigbas > canvas:nth-child(1){
|
210 |
+
border: 5px solid #000;
|
211 |
+
border-radius: 0px;
|
212 |
+
border-width: none;
|
213 |
+
}
|