Silence1412 committed on
Commit
ceb2aec
1 Parent(s): 54a5241

Update Chat_with_pdf_LLM.py

Browse files
Files changed (1) hide show
  1. Chat_with_pdf_LLM.py +8 -3
Chat_with_pdf_LLM.py CHANGED
@@ -39,7 +39,7 @@ def LLM_pdf(model_name = 'google/flan-t5-large'):
39
  st.session_state['generated'] = []
40
  if 'past' not in st.session_state:
41
  st.session_state['past'] = []
42
- print(st.session_state['generated'],st.session_state['past'])
43
 
44
  # show user input
45
  user_question = st.text_input("Ask a question about your PDF:")
@@ -52,10 +52,15 @@ def LLM_pdf(model_name = 'google/flan-t5-large'):
52
  response = chain.run(input_documents=docs,question=user_question)
53
 
54
  #st.write(response)
 
 
55
  st.session_state.past.append(user_question)
56
  st.session_state.generated.append(response)
57
 
 
58
  if st.session_state['generated']:
 
59
  for i in range(len(st.session_state['generated'])-1, -1, -1):
60
- message(st.session_state["generated"][i], key=str(i))
61
- message(st.session_state['past'][i], is_user=True, key=str(i) + '_user')
 
 
39
  st.session_state['generated'] = []
40
  if 'past' not in st.session_state:
41
  st.session_state['past'] = []
42
+ # print(st.session_state['generated'],st.session_state['past'])
43
 
44
  # show user input
45
  user_question = st.text_input("Ask a question about your PDF:")
 
52
  response = chain.run(input_documents=docs,question=user_question)
53
 
54
  #st.write(response)
55
+
56
+ # # append user_input and output to state
57
  st.session_state.past.append(user_question)
58
  st.session_state.generated.append(response)
59
 
60
+ # If responses have been generated by the model
61
  if st.session_state['generated']:
62
+ # Reverse iteration through the list
63
  for i in range(len(st.session_state['generated'])-1, -1, -1):
64
+ # message from streamlit_chat
65
+ message(st.session_state['past'][::-1][i], is_user=True, key=str(i) + '_user')
66
+ message(st.session_state['generated'][::-1][i], key=str(i))