Upload main.py
main.py
CHANGED
@@ -267,15 +267,24 @@ conversational_rag_chain = RunnableWithMessageHistory(
 
 
 def get_response(userquery:str):
+    response_generator = conversational_rag_chain.stream(
+        {"input": userquery},
+        config={"configurable": {"session_id": "abc123"}}
+    )
+    #len_query = len(user_query)
+    #c = -1
+    # Since it's a generator, iterate over the response
+    for rag_response in response_generator :
 
-
-
-
-
-
-
-
-
+        print(rag_response)
+
+        for k, v in rag_response.items():
+            #if k == 'context':
+                #continue
+            #elif k == 'chat_history':
+                #continue
+            if k == 'answer':
+                yield v
 
 ##creating streamlit frontend now
 
@@ -286,6 +295,7 @@ st.title("Calm Counsel")
 history = get_session_history(st.session_state.secret_id)
 
 user_query = st.chat_input("Type your message here...")
+
 sequence_number = 0
 if user_query is not None and user_query != "":
 
@@ -304,21 +314,11 @@ if user_query is not None and user_query != "":
 
     #adding user query to st history
     st.session_state.msg_list.append({'role':'human','content':user_query})
-
-
-    #trying for dict to json
-
-
-
-    #get ai response
-    with st.spinner("Thinking..."):
-        response = get_response(user_query)
-
 
     #display ai response
-
     with st.chat_message("ai"):
-        st.
+        with st.spinner('Thinking...'):
+            response = st.write_stream(get_response(user_query))
 
     print(st.session_state.msg_list)
 
@@ -326,6 +326,7 @@ if user_query is not None and user_query != "":
     print('Ii' * 100)
 
 
+
     print('Mm' * 100)
 
 
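For reference, the filtering idea behind the new get_response() can be exercised on its own. The sketch below is not part of main.py: fake_chunks() and answer_only() are made-up names, and the sample dicts only imitate the chunk shape that conversational_rag_chain.stream() appears to emit (the commented-out 'context' / 'chat_history' checks in the diff suggest each chunk carries one of several keys, with the generated text under 'answer').

# Minimal standalone sketch of the answer-filtering pattern used in get_response().
# fake_chunks() stands in for conversational_rag_chain.stream(); the data is made up.
def fake_chunks():
    yield {"input": "How do I stay calm?"}
    yield {"context": ["<retrieved documents would appear here>"]}
    yield {"answer": "Take a slow breath"}
    yield {"answer": ", then name what you are feeling."}

def answer_only(chunks):
    # Same idea as get_response(): pass through only the 'answer' pieces.
    for chunk in chunks:
        for k, v in chunk.items():
            if k == "answer":
                yield v

if __name__ == "__main__":
    # Outside Streamlit the pieces can simply be joined; inside the app,
    # st.write_stream(answer_only(...)) would render them incrementally.
    print("".join(answer_only(fake_chunks())))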
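On the frontend side, the change replaces the old blocking get_response() call with st.write_stream(), which drains a generator, renders each yielded piece as it arrives, and returns the concatenated string. The sketch below shows that wiring in isolation; it is not main.py itself: get_response() here is a stub echo generator, and the msg_list initialisation that main.py does elsewhere is recreated inline.

# Standalone sketch of the new chat turn (run with: streamlit run sketch.py).
import time
import streamlit as st

def get_response(userquery: str):
    # Stub generator standing in for the RAG-backed generator in main.py.
    for piece in ["You said: ", userquery, " (stub answer)"]:
        time.sleep(0.2)
        yield piece

if "msg_list" not in st.session_state:
    st.session_state.msg_list = []

user_query = st.chat_input("Type your message here...")
if user_query is not None and user_query != "":
    st.session_state.msg_list.append({'role': 'human', 'content': user_query})
    with st.chat_message("ai"):
        with st.spinner('Thinking...'):
            # st.write_stream renders each yielded piece as it arrives and
            # returns the full concatenated answer.
            response = st.write_stream(get_response(user_query))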