geekyrakshit committed
Commit · 076d575
1 Parent(s): aa14319

fix: app.py
app.py CHANGED
@@ -65,6 +65,9 @@ structured_output_model_name = st.sidebar.selectbox(
     help="select a model from the list",
 )
 
+# Streamlit app layout
+st.title("MedQA Assistant App")
+
 # Initialize Weave
 weave.init(project_name=project_name)
 
@@ -83,27 +86,10 @@ medqa_assistant = MedQAAssistant(
     llm_client=llm_client, retriever=retriever, figure_annotator=figure_annotator
 )
 
-
-
-
-# Initialize chat history
-if "chat_history" not in st.session_state:
-    st.session_state.chat_history = []
-
-# Display chat messages from history on app rerun
-for message in st.session_state.chat_history:
-    with st.chat_message(message["role"]):
-        st.markdown(message["content"])
-
-# Chat thread section with user input and response
-if query := st.chat_input("What medical question can I assist you with today?"):
-    # Add user message to chat history
-    st.session_state.chat_history.append({"role": "user", "content": query})
+query = st.chat_input("Enter your question here")
+if query:
     with st.chat_message("user"):
         st.markdown(query)
-
-    # Process query and get response
     response = medqa_assistant.predict(query=query)
-    st.session_state.chat_history.append({"role": "assistant", "content": response})
     with st.chat_message("assistant"):
         st.markdown(response)
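
After this commit the chat section of app.py is stateless: each Streamlit rerun reads one question from st.chat_input, echoes it in a user bubble, calls medqa_assistant.predict, and renders the answer in an assistant bubble. The st.session_state.chat_history bookkeeping is removed, so earlier turns are no longer re-displayed on rerun. A minimal runnable sketch of that flow is below; answer_query is a hypothetical stand-in for medqa_assistant.predict, whose construction happens earlier in app.py and is outside this diff.

import streamlit as st

st.title("MedQA Assistant App")

def answer_query(query: str) -> str:
    # Hypothetical stand-in for medqa_assistant.predict(query=query);
    # the real assistant is built earlier in app.py and is not part of this diff.
    return f"Placeholder answer for: {query}"

query = st.chat_input("Enter your question here")
if query:
    # Echo the user's message, then show the assistant's reply for this run only.
    with st.chat_message("user"):
        st.markdown(query)
    response = answer_query(query)
    with st.chat_message("assistant"):
        st.markdown(response)

To try the sketch, save it as a standalone script and launch it with streamlit run; Streamlit reruns the whole script each time a new chat message is submitted.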