Commit 6d2e8db by yizhangliu (parent: 98f817f)
Update app.py
app.py
CHANGED
@@ -70,6 +70,19 @@ HF_TOKEN_SD = os.environ.get('HF_TOKEN_SD')
 device = "cuda" if torch.cuda.is_available() else "cpu"
 print(f'device = {device}')
 
+def get_image_ext(img_bytes):
+    w = imghdr.what("", img_bytes)
+    if w is None:
+        w = "jpeg"
+    return w
+
+def read_content(file_path):
+    """read the content of target file
+    """
+    with open(file_path, 'rb') as f:
+        content = f.read()
+    return content
+
 model = None
 
 def model_process(image, mask, alpha_channel, ext):
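For context on the helper added above: imghdr.what sniffs the format from the first bytes of the data, so get_image_ext never needs a real filename and falls back to "jpeg" when detection fails. A minimal sketch, not part of this commit, using the stdlib imghdr module that the app relies on (note that very recent Python releases have dropped it):

import imghdr

# PNG files start with an 8-byte signature; imghdr recognises it from raw bytes,
# so the filename argument can be left empty exactly as get_image_ext does.
print(imghdr.what("", b"\x89PNG\r\n\x1a\n"))         # -> 'png'

# Unrecognised bytes yield None, which get_image_ext maps to the "jpeg" fallback.
print(imghdr.what("", b"definitely not an image"))   # -> None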
@@ -171,13 +184,6 @@ transform = transforms.Compose([
 ])
 '''
 
-def read_content(file_path):
-    """read the content of target file
-    """
-    with open(file_path, 'rb') as f:
-        content = f.read()
-
-    return content
 
 image_type = 'filepath' #'pil'
 def predict(input):
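The image_type switch kept in context above controls what Gradio hands to predict: with type='filepath' the callback receives the path of a temporary copy of the upload, while 'pil' would deliver a decoded PIL image. A hedged sketch of the difference, assuming Gradio 3.x; the handler names here are illustrative and not taken from app.py:

import gradio as gr

def show_path(path):
    # type='filepath': Gradio saves the upload to disk and passes its path.
    return f"received file path: {path}"

def show_size(img):
    # type='pil': Gradio decodes the upload into a PIL.Image first.
    return f"received a {img.size[0]}x{img.size[1]} PIL image"

demo_filepath = gr.Interface(fn=show_path, inputs=gr.Image(type="filepath"), outputs="text")
demo_pil = gr.Interface(fn=show_size, inputs=gr.Image(type="pil"), outputs="text")
# demo_filepath.launch()  # or demo_pil.launch()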
@@ -307,17 +313,17 @@ with image_blocks as demo:
     with gr.Box():
         with gr.Row():
             with gr.Column():
-                image = gr.Image(source='upload', tool='sketch',type=f'{image_type}', label="Upload")
+                image = gr.Image(source='upload', elem_id='input_img', tool='sketch', type=f'{image_type}', label="Upload") #.style(height=512)
                 with gr.Row(elem_id="prompt-container").style(mobile_collapse=False, equal_height=True):
                     # prompt = gr.Textbox(placeholder = 'Your prompt (what you want in place of what is erased)', show_label=False, elem_id="input-text")
-
+                    btn_in = gr.Button("Done!").style(
                         margin=True,
                         rounded=(True, True, True, True),
                         full_width=True,
                     )
 
         # with gr.Column():
-        image_out = gr.Image(label="Output")
+        image_out = gr.Image(label="Output", elem_id='output_img', visible=False) #.style(height=512)
     '''
     with gr.Group(elem_id="share-btn-container"):
         community_icon = gr.HTML(community_icon_html, visible=False)
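Because the Upload component above uses tool='sketch', the value handed to predict is, under the usual Gradio 3.x contract, a dict holding the uploaded image and the hand-drawn mask rather than a single file. A hedged sketch of a callback shaped like the app's predict(input); the key names follow that convention and the example paths are hypothetical:

import imghdr

def predict(input):
    image_path = input["image"]   # path of the uploaded picture (type='filepath')
    mask_path = input["mask"]     # path of the mask the user drew over it
    with open(image_path, "rb") as f:
        ext = imghdr.what("", f.read()) or "jpeg"   # same fallback as get_image_ext
    print(f"got a .{ext} image, mask stored at {mask_path}")
    return image_path

# Example value, as the sketch tool would supply it (paths are hypothetical):
# predict({"image": "/tmp/upload.png", "mask": "/tmp/mask.png"})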
@@ -328,7 +334,7 @@ with image_blocks as demo:
 
 
     # btn.click(fn=predict, inputs=[image, prompt], outputs=[image_out, community_icon, loading_icon, share_button])
-
+    btn_in.click(fn=predict, inputs=[image], outputs=[image]) #, community_icon, loading_icon, share_button])
     #share_button.click(None, [], [], _js=share_js)
 
 
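Taken together, the new button routes the sketch-tool value through predict and back into the same Image component. A minimal, self-contained Blocks sketch of that wiring, simplified relative to app.py and assuming a Gradio 3.x environment where gr.Image still accepts source= and tool=; the callback here is a placeholder, not the app's inpainting code:

import gradio as gr

def predict(input):
    # Placeholder: app.py runs the inpainting model here; this sketch just
    # returns the uploaded image so the round trip through the UI is visible.
    return input["image"] if isinstance(input, dict) else input

with gr.Blocks() as demo:
    image = gr.Image(source='upload', elem_id='input_img', tool='sketch',
                     type='filepath', label="Upload")
    btn_in = gr.Button("Done!")
    # Same wiring as the commit: the button feeds the Image component's value
    # to predict and writes the result back into that component.
    btn_in.click(fn=predict, inputs=[image], outputs=[image])

if __name__ == "__main__":
    demo.launch()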