aheman20 committed on
Commit
26ec4e9
1 Parent(s): edea4d2

new update

Files changed (1)
  1. app.py +6 -2
app.py CHANGED
@@ -81,14 +81,18 @@ with gr.Blocks() as demo:
         #Initalize lanchain - Conversation Retrieval Chain
         qa = ConversationalRetrievalChain.from_llm(llm, retriever=db.as_retriever(), memory=memory)
 
-        response = qa({'question': prompt})
+        question_input = {'question': user_message, "chat_history": chat_history}
 
+
+        response = qa(question_input)
+
+        answer = response["answer"]
 
 
         #get response from QA Chain
         #response = qa({'question': user_message, "chat_history": chat_history})
         #append user message and respone to chat history
-        chat_history.append((user_message, response["answer"]))
+        chat_history.append((user_message, answer))
         return gr.update(value=""), chat_history
     msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False)
     clear.click(lambda: None, None, chatbot, queue=False)
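
In plain terms, the commit replaces the old single call response = qa({'question': prompt}) with an explicit input dict built from user_message and chat_history, a separate call to the chain, and an answer variable that is then appended to the chat history. Below is a minimal sketch of how the updated handler might read once assembled; the user function name and the Gradio wiring come from the diff context, while llm, db, and memory are assumed to be created earlier in app.py and are not defined here.

import gradio as gr
from langchain.chains import ConversationalRetrievalChain

# Sketch only: `llm`, `db` (a vector store), and `memory` are assumed to be
# created earlier in app.py, as the diff context implies.
def user(user_message, chat_history):
    # Initialize the LangChain Conversational Retrieval Chain
    qa = ConversationalRetrievalChain.from_llm(
        llm, retriever=db.as_retriever(), memory=memory
    )

    # Build the chain input from the new message and the running chat history
    question_input = {'question': user_message, "chat_history": chat_history}
    response = qa(question_input)
    answer = response["answer"]

    # Record the (user message, answer) pair for the Chatbot component
    # and clear the input textbox
    chat_history.append((user_message, answer))
    return gr.update(value=""), chat_history

Because memory is attached to the chain, LangChain also tracks the conversation internally; the chat_history list maintained here is what the Gradio Chatbot component actually renders.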