Update app.py
app.py CHANGED
@@ -10,6 +10,7 @@ from translatepy import Translator
 #from huggingface_hub import hf_hub_download
 import requests
 import re
+import asyncio
 
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
 translator = Translator()
@@ -44,7 +45,7 @@ def enable_lora(lora_in, lora_add):
     return lora_in
 
 @spaces.GPU()
-def generate_image(
+async def generate_image(
     prompt:str,
     model:str,
     width:int=768,
@@ -83,7 +84,7 @@ def generate_image(
 
     return image1, image2, seed
 
-def gen(
+async def gen(
     prompt:str,
     lora_in:str="",
     lora_add:str="",
@@ -95,7 +96,8 @@ def gen(
     progress=gr.Progress(track_tqdm=True)
 ):
     model = enable_lora(lora_in, lora_add)
-
+    image1, image2, seed = await generate_image(prompt,model,width,height,scales,steps,seed)
+    return image1, image2, seed
 
 
 
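The net effect of the commit: both generate_image and gen become coroutines, gen awaits generate_image, and asyncio is imported to back the async machinery. Below is a minimal, dependency-free sketch of that control flow. Only the names visible in the hunks (generate_image, gen, prompt, model, width, height, scales, steps, seed, lora_in, lora_add) come from the diff; the parameter defaults beyond width=768, the enable_lora stand-in, and the stubbed generation body are assumptions, since the hunks do not show them.

# Minimal sketch of the async delegation pattern this commit introduces:
# gen() is a coroutine that awaits generate_image(). Everything below the
# signatures is assumed; the real bodies are not shown in the diff.
import asyncio
import random


async def generate_image(prompt: str, model: str, width: int = 768,
                         height: int = 768, scales: float = 3.5,
                         steps: int = 24, seed: int = -1):
    # Stand-in for the real diffusion call; resolving seed == -1 to a random
    # value is an assumption about the hidden function body.
    if seed == -1:
        seed = random.randint(0, 2**32 - 1)
    image1 = f"{model} | {prompt} | pass 1"   # placeholders for PIL images
    image2 = f"{model} | {prompt} | pass 2"
    return image1, image2, seed


async def gen(prompt: str, lora_in: str = "", lora_add: str = "",
              width: int = 768, height: int = 768, scales: float = 3.5,
              steps: int = 24, seed: int = -1):
    model = lora_in or lora_add   # simplified stand-in for enable_lora()
    # The key change from the diff: the worker is awaited instead of called
    # synchronously, so the hosting event loop stays responsive.
    image1, image2, seed = await generate_image(prompt, model, width, height,
                                                scales, steps, seed)
    return image1, image2, seed


if __name__ == "__main__":
    print(asyncio.run(gen("a watercolor fox", lora_in="author/some-lora")))

Gradio accepts coroutine functions as event handlers, so gen can be wired to a .click() or .submit() event directly without managing an event loop by hand.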