Files changed (1)
app.py +4 -4
app.py CHANGED
@@ -36,15 +36,15 @@ def get_text_chunk(row_text):
 
 
 def get_vectorstore(text_chunk):
-    embeddings = OpenAIEmbeddings(openai_api_key = os.getenv("OPENAI_API_KEY"))
-    # embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-xl")
+    # embeddings = OpenAIEmbeddings(openai_api_key = os.getenv("OPENAI_API_KEY"))
+    embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-xl")
     vector = FAISS.from_texts(text_chunk,embeddings)
     return vector
 
 
 def get_conversation_chain(vectorstores):
-    llm = ChatOpenAI(openai_api_key = os.getenv("OPENAI_API_KEY"))
-    # llm = HuggingFaceHub(repo_id="google/flan-t5-base", model_kwargs={"temperature":0.5, "max_length":512})
+    # llm = ChatOpenAI(openai_api_key = os.getenv("OPENAI_API_KEY"))
+    llm = HuggingFaceHub(repo_id="google/flan-t5-base", model_kwargs={"temperature":0.5, "max_length":512})
     memory = ConversationBufferMemory(memory_key = "chat_history",return_messages = True)
     conversation_chain = ConversationalRetrievalChain.from_llm(llm=llm,
                                                                retriever=vectorstores.as_retriever(),
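
For context, a minimal sketch of how the two changed helpers wire together after this switch from OpenAI to Hugging Face models. This is not taken from the PR: it assumes the classic `langchain` package layout already used in app.py, a HUGGINGFACEHUB_API_TOKEN in the environment for the Hub-hosted LLM, and illustrative sample data (`chunks`, the question string).

# Sketch only: imports and usage the changed functions rely on (assumptions noted above).
from langchain.embeddings import HuggingFaceInstructEmbeddings
from langchain.llms import HuggingFaceHub
from langchain.vectorstores import FAISS
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain


def get_vectorstore(text_chunk):
    # Embed each chunk locally with the Instructor model and index it in FAISS.
    embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-xl")
    return FAISS.from_texts(text_chunk, embeddings)


def get_conversation_chain(vectorstores):
    # flan-t5-base is served via the Hugging Face Hub inference API.
    llm = HuggingFaceHub(repo_id="google/flan-t5-base",
                         model_kwargs={"temperature": 0.5, "max_length": 512})
    memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
    return ConversationalRetrievalChain.from_llm(llm=llm,
                                                 retriever=vectorstores.as_retriever(),
                                                 memory=memory)


if __name__ == "__main__":
    chunks = ["Example chunk of document text."]   # illustrative; app.py gets these from get_text_chunk(...)
    chain = get_conversation_chain(get_vectorstore(chunks))
    print(chain({"question": "What is this document about?"})["answer"])

With the buffer memory attached, the chain only needs a "question" key per call; the chat history is tracked internally, and the reply is returned under the "answer" key.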