gli-mrunal committed on
Commit
85d8a15
1 Parent(s): a9ae0c8

Update app.py

Files changed (1)
  1. app.py +6 -7
app.py CHANGED
@@ -22,8 +22,8 @@ from PyPDF2 import PdfReader
 from langchain.text_splitter import CharacterTextSplitter
 from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings
 from langchain.vectorstores import FAISS # FAISS instead of PineCone
-# from langchain.llms import OpenAI
-#from langchain.llms import HuggingFaceHub
+from langchain.llms import OpenAI
+from langchain.llms import HuggingFaceHub
 from langchain.chat_models import ChatOpenAI
 from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
@@ -53,13 +53,12 @@ def get_vectorstore(text_chunks):
     vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embeddings)
     return vectorstore
 
-
-# google/flan-t5-xxl
+
 
 def get_conversation_chain(vectorstore):
-    #llm = OpenAI()
-    llm = ChatOpenAI()
-    #llm = HuggingFaceHub(repo_id="bigscience/bloom", model_kwargs={"temperature":0.5, "max_length":512})
+    llm = OpenAI()
+    #llm = ChatOpenAI()
+    #llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
     memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
     conversation_chain = ConversationalRetrievalChain.from_llm(
         llm=llm,
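
This commit switches the active model in get_conversation_chain from the chat-style ChatOpenAI() to the completion-style OpenAI() and un-comments the corresponding langchain.llms imports. The hunk cuts off before the chain's closing arguments, so the sketch below is only a minimal reconstruction of how this setup is typically wired and called with the classic LangChain 0.0.x API; the retriever=vectorstore.as_retriever() and memory=memory arguments, the OpenAIEmbeddings choice, the throwaway sample texts, and the invocation at the bottom are assumptions based on the standard ConversationalRetrievalChain pattern, not lines from this diff.

# Hedged sketch (classic LangChain 0.0.x API), assuming the usual closing
# arguments for ConversationalRetrievalChain.from_llm and a small in-memory
# FAISS index; OpenAI and OpenAIEmbeddings need OPENAI_API_KEY to actually run.
from langchain.llms import OpenAI
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationalRetrievalChain


def get_conversation_chain(vectorstore):
    llm = OpenAI()  # completion-style LLM chosen by this commit (was ChatOpenAI())
    memory = ConversationBufferMemory(memory_key='chat_history', return_messages=True)
    conversation_chain = ConversationalRetrievalChain.from_llm(
        llm=llm,
        retriever=vectorstore.as_retriever(),  # assumed: expose the FAISS store as a retriever
        memory=memory,                         # assumed: keep the running chat history
    )
    return conversation_chain


# Hypothetical usage with placeholder texts standing in for the PDF chunks.
texts = ["FAISS stores the embedded text chunks.", "The chain answers follow-up questions."]
vectorstore = FAISS.from_texts(texts=texts, embedding=OpenAIEmbeddings())
chain = get_conversation_chain(vectorstore)
print(chain({"question": "What does FAISS store?"})["answer"])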