mikepastor11 committed on
Commit
c15629e
1 Parent(s): f72b341

Update app.py

Files changed (1)
  1. app.py +17 -16
app.py CHANGED
@@ -20,8 +20,9 @@ from langchain_community.vectorstores import FAISS
 
 from langchain.text_splitter import CharacterTextSplitter
 
-# from langchain.memory import ConversationBufferMemory
-# from langchain.chains import ConversationalRetrievalChain
+from langchain.memory import ConversationBufferMemory
+from langchain.chains import ConversationalRetrievalChain
+
 # from htmlTemplates import css, bot_template, user_template
 # from langchain.llms import HuggingFaceHub
 
@@ -74,20 +75,20 @@ def get_vectorstore(text_chunks):
 
     return vectorstore
 
-# def get_conversation_chain(vectorstore):
-#     # llm = ChatOpenAI()
-#     # llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
-#     # google/bigbird-roberta-base  facebook/bart-large
-#     llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature": 0.5, "max_length": 512})
-
-#     memory = ConversationBufferMemory(
-#         memory_key='chat_history', return_messages=True)
-#     conversation_chain = ConversationalRetrievalChain.from_llm(
-#         llm=llm,
-#         retriever=vectorstore.as_retriever(),
-#         memory=memory,
-#     )
-#     return conversation_chain
+def get_conversation_chain(vectorstore):
+    # llm = ChatOpenAI()
+    # llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
+    # google/bigbird-roberta-base  facebook/bart-large
+    llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature": 0.5, "max_length": 512})
+
+    memory = ConversationBufferMemory(
+        memory_key='chat_history', return_messages=True)
+    conversation_chain = ConversationalRetrievalChain.from_llm(
+        llm=llm,
+        retriever=vectorstore.as_retriever(),
+        memory=memory,
+    )
+    return conversation_chain
 
 # def handle_userinput(user_question):
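The net effect of the commit is that the conversation-memory imports and get_conversation_chain() are re-enabled. As a rough usage sketch (not part of this commit), the restored function could be exercised as below, assuming app.py's get_vectorstore helper and a HUGGINGFACEHUB_API_TOKEN in the environment; the splitter settings and sample text are illustrative only.

from langchain.text_splitter import CharacterTextSplitter

# Illustrative chunking; chunk_size/chunk_overlap values are assumptions.
splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=200)
text_chunks = splitter.split_text("Example PDF text extracted elsewhere in app.py.")

vectorstore = get_vectorstore(text_chunks)          # FAISS index built by app.py's helper
conversation = get_conversation_chain(vectorstore)  # chain restored by this commit

# ConversationBufferMemory keeps chat_history, so follow-up questions can
# refer back to earlier turns in the same session.
response = conversation({"question": "What does the document say?"})
print(response["answer"])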