Spaces:
Sleeping
Sleeping
Gainward777
committed on
Commit
•
56373b9
1
Parent(s):
d3248bd
Update sd/zerogpu_controller.py
Browse files- sd/zerogpu_controller.py +86 -83
sd/zerogpu_controller.py
CHANGED
@@ -1,84 +1,87 @@
|
|
1 |
-
#from ui.gradio_ui import ui
|
2 |
-
import spaces
|
3 |
-
|
4 |
-
from sd.
|
5 |
-
|
6 |
-
|
7 |
-
|
8 |
-
#
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
|
70 |
-
|
71 |
-
|
72 |
-
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
|
|
|
|
|
|
84 |
#ui(get_first_result, get_second_result) #controller)
|
|
|
1 |
+
#from ui.gradio_ui import ui
|
2 |
+
import spaces
|
3 |
+
|
4 |
+
from sd.prompt_helper import helper
|
5 |
+
|
6 |
+
from sd.utils.utils import *
|
7 |
+
from utils.utils import sketch_process, prompt_preprocess
|
8 |
+
#from sd.sd_controller import Controller
|
9 |
+
|
10 |
+
#controller=Controller()
|
11 |
+
|
12 |
+
# Model identifiers: index 0 is the anime-style SDXL checkpoint (used by the
# ControlNet pipeline), index 1 is base SDXL (used for the scheduler and the
# adapter pipeline).
MODELS_NAMES=["cagliostrolab/animagine-xl-3.1",
              "stabilityai/stable-diffusion-xl-base-1.0"]
# LoRA weights loaded into the first (ControlNet) pipeline.
LORA_PATH='sd/lora/lora.safetensors'

# Shared components, built once at import time.
# NOTE(review): get_vae/get_controlnet/get_adapter/get_scheduler/get_detector/
# get_pipe presumably come from the star import of sd.utils.utils — confirm.
VAE=get_vae()
CONTROLNET=get_controlnet()
ADAPTER=get_adapter()
SCHEDULER=get_scheduler(model_name=MODELS_NAMES[1])
DETECTOR=get_detector()

# Pipeline 1: sketch-guided generation (ControlNet + LoRA on the anime model).
FIRST_PIPE=get_pipe(vae=VAE,
                    model_name=MODELS_NAMES[0],
                    controlnet=CONTROLNET,
                    lora_path=LORA_PATH)

# Pipeline 2: adapter-guided generation on base SDXL with a custom scheduler.
SECOND_PIPE=get_pipe(vae=VAE,
                     model_name=MODELS_NAMES[1],
                     adapter=ADAPTER,
                     scheduler=SCHEDULER)
|
30 |
+
|
31 |
+
|
32 |
+
@spaces.GPU
def get_first_result(img, prompt, negative_prompt,
                     controlnet_scale=0.5, strength=1.0,n_steps=30,eta=1.0):
    """Generate an image from a sketch with the ControlNet pipeline.

    Args:
        img: input sketch; split by ``sketch_process`` into a substrate
            (base image) and a resized control image.
        prompt: text prompt, normalized via ``prompt_preprocess``.
        negative_prompt: negative text prompt, forwarded unchanged.
        controlnet_scale: ControlNet conditioning scale (cast to ``float``).
        strength: img2img strength.
        n_steps: number of inference steps.
        eta: DDIM ``eta`` parameter.

    Returns:
        The first generated image (``result.images[0]``).
    """
    substrate, resized_image = sketch_process(img)
    prompt=prompt_preprocess(prompt)

    FIRST_PIPE.to('cuda')
    try:
        # Fixed seed keeps output reproducible for identical inputs.
        result=FIRST_PIPE(image=substrate,
                          control_image=resized_image,
                          strength=strength,
                          prompt=prompt,
                          negative_prompt = negative_prompt,
                          controlnet_conditioning_scale=float(controlnet_scale),
                          generator=torch.manual_seed(0),
                          num_inference_steps=n_steps,
                          eta=eta)
    finally:
        # Always move the pipeline back to CPU, even when the call raises,
        # so a failed request does not leave the model pinned on CUDA.
        FIRST_PIPE.to('cpu')

    return result.images[0]
|
54 |
+
|
55 |
+
|
56 |
+
@spaces.GPU
def get_second_result(img, prompt, negative_prompt,
                      g_scale=7.5, n_steps=25,
                      adapter_scale=0.9, adapter_factor=1.0):
    """Generate an image with the adapter pipeline from a detector-preprocessed sketch.

    Args:
        img: input image; run through ``DETECTOR`` (1024px detect/output
            resolution, filtered) and converted to grayscale ("L").
        prompt: text prompt, forwarded unchanged.
        negative_prompt: negative text prompt, forwarded unchanged.
        g_scale: classifier-free guidance scale.
        n_steps: number of inference steps.
        adapter_scale: adapter conditioning scale.
        adapter_factor: adapter conditioning factor.

    Returns:
        The first generated image (``result.images[0]``).
    """
    DETECTOR.to('cuda')
    SECOND_PIPE.to('cuda')
    try:
        preprocessed_img=DETECTOR(img,
                                  detect_resolution=1024,
                                  image_resolution=1024,
                                  apply_filter=True).convert("L")

        # Fixed seed keeps output reproducible for identical inputs.
        result=SECOND_PIPE(prompt=prompt,
                           negative_prompt=negative_prompt,
                           image=preprocessed_img,
                           guidance_scale=g_scale,
                           num_inference_steps=n_steps,
                           adapter_conditioning_scale=adapter_scale,
                           adapter_conditioning_factor=adapter_factor,
                           generator = torch.manual_seed(42))
    finally:
        # Always release the GPU copies, even when detection or generation
        # raises, so a failed request does not leave models pinned on CUDA.
        DETECTOR.to('cpu')
        SECOND_PIPE.to('cpu')

    return result.images[0]
|
82 |
+
|
83 |
+
|
84 |
+
def get_help_w_prompt(img):
    """Delegate prompt suggestion for *img* to the shared prompt helper."""
    suggestion = helper.get_help(img)
    return suggestion
|
86 |
+
|
87 |
#ui(get_first_result, get_second_result) #controller)
|