Du Mingzhe committed on
Commit 35f80be
1 Parent(s): 00fe22c

Update logging...

Files changed (1)
  1. app.py +5 -1
app.py CHANGED
@@ -25,7 +25,9 @@ if prompt := st.chat_input("What's up?"):
     st.session_state.messages.append({"role": "user", "content": prompt})
     with st.chat_message("user"):
         st.markdown(prompt)
-    with st.chat_message("assistant"):
+    with st.chat_message("assistant"):
+        print(f"Prompt: {prompt}")
+
         memory = pinecone_client.query_conversation(messages=st.session_state.messages[:-5], user=st.session_state['user'], top_k=3)
         print(f"Memory: {memory}")
 
@@ -38,6 +40,8 @@ if prompt := st.chat_input("What's up?"):
 
         stream = llm_client.response_generate(prompt, st.session_state.messages, memory, web_result)
         response = st.write_stream(stream)
+
+        print(f"Response: {response}")
         st.session_state.messages.append({"role": "assistant", "content": response})
 
         # Update seesion in Pinecone
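
For context, below is a minimal, runnable sketch of the print-based logging pattern this commit adds, isolated from the rest of app.py. The Pinecone memory lookup and web search are omitted, and fake_stream is a hypothetical stub standing in for llm_client.response_generate; only the two new print() calls mirror the diff above.

import streamlit as st

# Hypothetical stub in place of llm_client.response_generate (assumption, not the real client).
def fake_stream(prompt):
    for token in ["You", " said:", f" {prompt}"]:
        yield token

if "messages" not in st.session_state:
    st.session_state.messages = []

if prompt := st.chat_input("What's up?"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        print(f"Prompt: {prompt}")        # logged to stdout before generation, as in the commit

        stream = fake_stream(prompt)
        response = st.write_stream(stream)  # streams tokens to the UI and returns the full text

        print(f"Response: {response}")    # logged after the stream completes
        st.session_state.messages.append({"role": "assistant", "content": response})

Since these are plain print() calls, the prompt and response presumably end up in the app's stdout log (e.g. the Space's container logs) rather than in the Streamlit UI.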