SiraH committed on
Commit 4c5f2fb
1 Parent(s): 11debf7

comment chat system

Files changed (1)
  1. app.py +42 -34
app.py CHANGED
@@ -202,12 +202,11 @@ def load_embeddings():
     return embeddings
 
 def main():
-    msgs = StreamlitChatMessageHistory(key="langchain_messages")
-    print(msgs)
-    if "messages" not in st.session_state:
-        st.session_state.messages = []
+    # msgs = StreamlitChatMessageHistory(key="langchain_messages")
+    # print(msgs)
+    # if "messages" not in st.session_state:
+    #     st.session_state.messages = []
 
-    data = []
     # DB_FAISS_UPLOAD_PATH = "vectorstores/db_faiss"
     st.header("DOCUMENT QUESTION ANSWERING IS2")
     # directory = "data"
@@ -254,6 +253,15 @@ def main():
         return_source_documents = True,
         memory = memory,
         chain_type_kwargs = {"prompt":qa_prompt})
+
+    query = st.text_input("ASK ABOUT THE DOCS:")
+    if query:
+        start = time.time()
+        response = qa_chain({'query': query})
+        st.write(response["result"])
+        end = time.time()
+        st.write("Respone time:",int(end-start),"sec")
+
     # qa_chain = ConversationalRetrievalChain(
     # retriever =db.as_retriever(search_kwargs={'k':2}),
     # question_generator=question_generator,
@@ -264,44 +272,44 @@ def main():
     # #get_chat_history=lambda h :h
     # )
 
-    for message in st.session_state.messages:
-        with st.chat_message(message["role"]):
-            st.markdown(message["content"])
+    # for message in st.session_state.messages:
+    #     with st.chat_message(message["role"]):
+    #         st.markdown(message["content"])
 
-    # Accept user input
-    if query := st.chat_input("What is up?"):
-        # Display user message in chat message container
-        with st.chat_message("user"):
-            st.markdown(query)
-        # Add user message to chat history
-        st.session_state.messages.append({"role": "user", "content": query})
+    # # Accept user input
+    # if query := st.chat_input("What is up?"):
+    #     # Display user message in chat message container
+    #     with st.chat_message("user"):
+    #         st.markdown(query)
+    #     # Add user message to chat history
+    #     st.session_state.messages.append({"role": "user", "content": query})
 
-        start = time.time()
+    #     start = time.time()
 
-        response = qa_chain({'query': query})
+    #     response = qa_chain({'query': query})
 
-        # url_list = set([i.metadata['source'] for i in response['source_documents']])
-        #print(f"condensed quesion : {question_generator.run({'chat_history': response['chat_history'], 'question' : query})}")
+    #     # url_list = set([i.metadata['source'] for i in response['source_documents']])
+    #     #print(f"condensed quesion : {question_generator.run({'chat_history': response['chat_history'], 'question' : query})}")
 
-        with st.chat_message("assistant"):
-            st.markdown(response['result'])
+    #     with st.chat_message("assistant"):
+    #         st.markdown(response['result'])
 
-        end = time.time()
-        st.write("Respone time:",int(end-start),"sec")
-        print(response)
+    #     end = time.time()
+    #     st.write("Respone time:",int(end-start),"sec")
+    #     print(response)
 
-        # Add assistant response to chat history
-        st.session_state.messages.append({"role": "assistant", "content": response['result']})
+    #     # Add assistant response to chat history
+    #     st.session_state.messages.append({"role": "assistant", "content": response['result']})
 
-        # with st.expander("See the related documents"):
-        #     for count, url in enumerate(url_list):
-        #         #url_reg = regex_source(url)
-        #         st.write(str(count+1)+":", url)
+    #     # with st.expander("See the related documents"):
+    #     #     for count, url in enumerate(url_list):
+    #     #         #url_reg = regex_source(url)
+    #     #         st.write(str(count+1)+":", url)
 
-    clear_button = st.button("Start new convo")
-    if clear_button :
-        st.session_state.messages = []
-        qa_chain.memory.chat_memory.clear()
+    # clear_button = st.button("Start new convo")
+    # if clear_button :
+    #     st.session_state.messages = []
+    #     qa_chain.memory.chat_memory.clear()
 
 
 if __name__ == '__main__':
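
Taken together, the commit comments out the Streamlit chat-message UI and leaves a single text box feeding the existing RetrievalQA chain. Below is a minimal sketch of that resulting flow; fake_qa_chain is a hypothetical stand-in for the FAISS/RetrievalQA chain that app.py builds earlier in main() (not shown in this diff), and the real call there is qa_chain({'query': query}).

# Minimal sketch of the text-input flow this commit switches to.
# fake_qa_chain is a hypothetical placeholder for the RetrievalQA chain
# that app.py constructs earlier in main().
import time

import streamlit as st


def fake_qa_chain(inputs: dict) -> dict:
    # Placeholder: the real chain retrieves chunks from the FAISS store and runs the LLM.
    return {"result": f"(stub answer for: {inputs['query']})"}


def main():
    st.header("DOCUMENT QUESTION ANSWERING IS2")
    query = st.text_input("ASK ABOUT THE DOCS:")
    if query:
        start = time.time()
        response = fake_qa_chain({"query": query})  # real app: qa_chain({'query': query})
        st.write(response["result"])
        end = time.time()
        st.write("Response time:", int(end - start), "sec")


if __name__ == "__main__":
    main()

Run with streamlit run app.py to see the single-turn behaviour the commit leaves in place: no chat history is kept, just one answer per submitted question.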