Update app.py
app.py CHANGED
@@ -222,11 +222,14 @@ def demo():
             outputs=[vector_db, collection_name, db_progress])
         qachain_btn.click(initialize_LLM, \
             inputs=[llm_btn, slider_temperature, slider_maxtokens, slider_topk, vector_db], \
-            outputs=[qa_chain, llm_progress]).then(lambda:[None,"",0,"",0,"",0], \
+            outputs=[qa_chain, llm_progress]).then(lambda:[None, "", 0, "", 0, "", 0, "", 0, "", 0], \
             inputs=None, \
-            outputs=[chatbot,
-
-
+            outputs=[chatbot,
+                     doc_source1, source1_page,
+                     doc_source2, source2_page,
+                     doc_source3, source3_page,
+                     doc_source4, source4_page,
+                     doc_source5, source5_page], queue=False)

         # Chatbot events
         msg.submit(conversation, \

@@ -245,15 +248,19 @@ def demo():
                      doc_source3, source3_page,
                      doc_source4, source4_page,
                      doc_source5, source5_page], queue=False)
-        clear_btn.click(
-
-
-
-
-
-
-
-
+        clear_btn.click(
+            lambda: [None, "", 0, "", 0, "", 0, "", 0, "", 0],
+            inputs=None,
+            outputs=[
+                chatbot,
+                doc_source1, source1_page,
+                doc_source2, source2_page,
+                doc_source3, source3_page,
+                doc_source4, source4_page,
+                doc_source5, source5_page
+            ],
+            queue=False
+        )
     demo.queue().launch(debug=True)
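For context, a minimal, self-contained sketch of the reset pattern this commit extends, assuming standard Gradio components (gr.Chatbot, gr.Textbox, gr.Number) rather than the full app.py layout: the handler's return list must supply one default value per output component, which is why the lambda grows from 7 values (chatbot plus three source/page pairs) to 11 values (chatbot plus five pairs).

# Minimal sketch (not the full app.py): the lambda returns one reset value per
# output component -- None clears the Chatbot, "" clears a Textbox, 0 resets a
# Number -- so its length must match the outputs list exactly.
import gradio as gr

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    doc_source1 = gr.Textbox(label="Reference 1")
    source1_page = gr.Number(label="Page", value=0)
    doc_source2 = gr.Textbox(label="Reference 2")
    source2_page = gr.Number(label="Page", value=0)
    clear_btn = gr.Button("Clear")

    # Same wiring as clear_btn.click in the diff, shortened to two source/page
    # pairs here; app.py resets five pairs plus the chatbot.
    clear_btn.click(
        lambda: [None, "", 0, "", 0],
        inputs=None,
        outputs=[chatbot, doc_source1, source1_page, doc_source2, source2_page],
        queue=False,
    )

demo.queue().launch(debug=True)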