nickmuchi committed on
Commit
afff965
1 Parent(s): 060f21e

Update functions.py

Browse files
Files changed (1) hide show
  1. functions.py +9 -9
functions.py CHANGED
@@ -58,7 +58,7 @@ time_str = time.strftime("%d%m%Y-%H%M%S")
58
  HTML_WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem;
59
  margin-bottom: 2.5rem">{}</div> """
60
 
61
- memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
62
 
63
  #Stuff Chain Type Prompt template
64
 
@@ -230,15 +230,15 @@ def embed_text(query,embedding_model,_docsearch):
230
  temperature=0
231
  )
232
 
233
- chain = RetrievalQA.from_chain_type(llm=chat_llm, chain_type="stuff",
234
- retriever=_docsearch.as_retriever(),
235
- return_source_documents=True)
236
 
237
- # chain = ConversationalRetrievalChain.from_llm(chat_llm,
238
- # retriever= _docsearch.as_retriever(),
239
- # # condense_question_prompt = load_prompt(),
240
- # memory = memory,
241
- # return_source_documents=True)
242
 
243
  answer = chain({"query": query})
244
 
 
58
  HTML_WRAPPER = """<div style="overflow-x: auto; border: 1px solid #e6e9ef; border-radius: 0.25rem; padding: 1rem;
59
  margin-bottom: 2.5rem">{}</div> """
60
 
61
+ memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True, output_key='answer')
62
 
63
  #Stuff Chain Type Prompt template
64
 
 
230
  temperature=0
231
  )
232
 
233
+ # chain = RetrievalQA.from_chain_type(llm=chat_llm, chain_type="stuff",
234
+ # retriever=_docsearch.as_retriever(),
235
+ # return_source_documents=True)
236
 
237
+ chain = ConversationalRetrievalChain.from_llm(chat_llm,
238
+ retriever= _docsearch.as_retriever(),
239
+ qa_prompt = load_prompt(),
240
+ memory = memory,
241
+ return_source_documents=True)
242
 
243
  answer = chain({"query": query})
244