Update app.py
app.py CHANGED
@@ -65,6 +65,14 @@ def parse_output_llm_with_sources(output):
     content_parts = "".join(parts)
     return content_parts
 
+def start_chat(query,history):
+    history = history + [(query,None)]
+    history = [tuple(x) for x in history]
+    return (gr.update(interactive = False),gr.update(selected=1),history)
+
+def finish_chat():
+    return (gr.update(interactive = True,value = ""))
+
 async def chat(query,history,sources,reports):
     """taking a query and a message history, use a pipeline (reformulation, retriever, answering) to yield a tuple of:
     (messages in gradio format, messages in langchain format, source documents)"""
@@ -279,7 +287,8 @@ with gr.Blocks(title="Audit Q&A", css= "style.css", theme=theme,elem_id = "main-
     - **This is just a prototype and being tested and worked upon, so its not perfect and may sometimes give irrelevant answers**. If you are not satisfied with the answer, please ask a more specific question or report your feedback to help us improve the system.
     """)
 
-    (textbox
+    (textbox
+     .submit(start_chat, [textbox,chatbot], [textbox,tabs,chatbot],queue = False,api_name = "start_chat_textbox"))
 demo.queue()
 
 demo.launch()
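The commit adds two small UI helpers, start_chat and finish_chat, and attaches the first of them to the textbox's submit event. Only that first .submit(...) hook appears in the diff, so the sketch below is a minimal, self-contained illustration of how this start / answer / finish pattern is commonly chained in a Gradio Blocks app. The answer_stub function, the tab layout, the placeholder text, and the .then(...) wiring are illustrative assumptions, not code from the Space.

```python
# Minimal sketch of the submit -> answer -> re-enable pattern the new helpers suggest.
# Assumptions: answer_stub, the two tabs, and the .then(...) chain are illustrative;
# only the first .submit hook is shown in the commit itself.
import gradio as gr

def start_chat(query, history):
    # Append the new question with an empty answer, lock the textbox,
    # and switch the tab view to the chat tab (id=1).
    history = history + [(query, None)]
    history = [tuple(x) for x in history]  # Gradio hands the pairs back as lists
    return (gr.update(interactive=False), gr.update(selected=1), history)

def finish_chat():
    # Unlock and clear the textbox once an answer has been produced.
    return gr.update(interactive=True, value="")

def answer_stub(query, history):
    # Hypothetical stand-in for the Space's async chat() retrieval/answering pipeline.
    history[-1] = (history[-1][0], f"(placeholder answer to: {query})")
    return history

with gr.Blocks() as demo:
    textbox = gr.Textbox(placeholder="Ask a question about the audit reports...")
    with gr.Tabs() as tabs:
        with gr.Tab("About", id=0):
            gr.Markdown("Intro text goes here.")
        with gr.Tab("Chat", id=1):
            chatbot = gr.Chatbot()

    (textbox
     .submit(start_chat, [textbox, chatbot], [textbox, tabs, chatbot],
             queue=False, api_name="start_chat_textbox")
     .then(answer_stub, [textbox, chatbot], [chatbot])
     .then(finish_chat, None, [textbox]))

demo.queue()
demo.launch()
```

Running start_chat with queue = False lets the interface react immediately (the question shows up in the history and the textbox locks) while the heavier answering step goes through the queue; finish_chat then clears and re-enables the textbox, and gr.update(selected=1) is what flips the view to the chat tab on submit.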