Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -29,6 +29,38 @@ date_time_str = now.strftime("%Y-%m-%d %H:%M:%S")
|
|
29 |
#client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
30 |
history = []
|
31 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
32 |
def format_prompt(message, history):
|
33 |
prompt = "<s>"
|
34 |
for user_prompt, bot_response in history:
|
@@ -155,8 +187,12 @@ with gr.Blocks(css=style) as iface:
|
|
155 |
submit_b = gr.Button()
|
156 |
stop_b = gr.Button("Stop")
|
157 |
clear = gr.ClearButton([msg, chatbot])
|
|
|
158 |
with gr.Column(scale=2):
|
159 |
sumbox=gr.Image(label="Image")
|
|
|
|
|
|
|
160 |
sub_b = submit_b.click(run, [msg,chatbot,sumbox,model_drop,chat_model_drop,agent_choice],[chatbot,sumbox],concurrency_limit=20)
|
161 |
sub_e = msg.submit(run, [msg, chatbot,sumbox,model_drop,chat_model_drop,agent_choice], [chatbot,sumbox],concurrency_limit=20)
|
162 |
stop_b.click(None,None,None, cancels=[sub_b,sub_e])
|
|
|
29 |
#client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
30 |
history = []
|
31 |
|
32 |
+
def gen_from_infer(in_prompt, history, image_box=None, model_drop=None, chat_model_drop=None, agent_choice=None):
    """Turn a chat message into a refined image prompt, then render the image.

    Generator wired to the "Test" button; yields ``(history, image)`` tuples so
    the Gradio UI can show the generated prompt in the chatbot first and fill
    the image pane once diffusion finishes.

    Parameters mirror the ``.click`` inputs list
    ``[msg, chatbot, sumbox, model_drop, chat_model_drop, agent_choice]``:
        in_prompt       -- user message (the "purpose") from the textbox
        history         -- chatbot history, list of (user, bot) tuples
        image_box       -- current image component value (read but unused)
        model_drop      -- index into ``loaded_model`` selecting the diffusion model
        chat_model_drop -- chat-model choice forwarded to ``generate``
        agent_choice    -- agent selection (currently unused)

    Yields:
        (history, None) once the prompt is ready, then (history, PIL.Image)
        on success, or ([(in_prompt, "an Error occured")], None) on HTTP failure.
    """
    history = history or []
    # Ask the chat model for a refined text-to-image prompt.
    # (Original body referenced undefined names `purpose`/`chat_drop`;
    # they correspond to `in_prompt`/`chat_model_drop` per the UI wiring.)
    out_prompt = generate(in_prompt, history, chat_model_drop)
    history.append((in_prompt, out_prompt))
    # First yield: update the chat immediately while the image renders.
    yield (history, None)

    out_img = InferenceClient().text_to_image(
        prompt=out_prompt,
        negative_prompt=None,
        height=512,
        width=512,
        num_inference_steps=None,
        guidance_scale=None,
        model=loaded_model[int(model_drop)],
        seed=1,
    )
    print(out_img)
    # NOTE(review): this assumes `out_img` is a server-side file path exposed
    # by the Space's /file= route — confirm. `InferenceClient.text_to_image`
    # normally returns a PIL.Image, in which case it could be yielded directly
    # without the HTTP round-trip below.
    url = f'https://johann22-chat-diffusion-describe.hf.space/file={out_img}'
    print(url)
    r = requests.get(url, stream=True)
    if r.status_code == 200:
        out = Image.open(io.BytesIO(r.content))
        yield (history, out)
    else:
        yield ([(in_prompt, "an Error occured")], None)
|
63 |
+
|
64 |
def format_prompt(message, history):
|
65 |
prompt = "<s>"
|
66 |
for user_prompt, bot_response in history:
|
|
|
187 |
submit_b = gr.Button()
|
188 |
stop_b = gr.Button("Stop")
|
189 |
clear = gr.ClearButton([msg, chatbot])
|
190 |
+
test_btn = gr.Button("Test")
|
191 |
with gr.Column(scale=2):
|
192 |
sumbox=gr.Image(label="Image")
|
193 |
+
|
194 |
+
run_test = test_btn.click(gen_from_infer, [msg,chatbot,sumbox,model_drop,chat_model_drop,agent_choice],[chatbot,sumbox],concurrency_limit=20)
|
195 |
+
|
196 |
sub_b = submit_b.click(run, [msg,chatbot,sumbox,model_drop,chat_model_drop,agent_choice],[chatbot,sumbox],concurrency_limit=20)
|
197 |
sub_e = msg.submit(run, [msg, chatbot,sumbox,model_drop,chat_model_drop,agent_choice], [chatbot,sumbox],concurrency_limit=20)
|
198 |
stop_b.click(None,None,None, cancels=[sub_b,sub_e])
|