7jimmy committed on
Commit
c10087d
·
1 Parent(s): ff4abe5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -5
app.py CHANGED
@@ -7,12 +7,8 @@ from langchain.vectorstores import FAISS
7
  from langchain.chat_models import ChatOpenAI
8
  from langchain.memory import ConversationBufferMemory
9
  from langchain.chains import ConversationalRetrievalChain
10
-
11
- # Add import for HuggingFaceHub
12
- from langchain.llms import HuggingFaceHub
13
-
14
- # Import htmlTemplates, assuming it's a local module
15
  from htmlTemplates import css, bot_template, user_template
 
16
 
17
  def get_pdf_text(pdf_docs):
18
  text = ""
@@ -22,6 +18,7 @@ def get_pdf_text(pdf_docs):
22
  text += page.extract_text()
23
  return text
24
 
 
25
  def get_text_chunks(text):
26
  text_splitter = CharacterTextSplitter(
27
  separator="\n",
@@ -32,12 +29,14 @@ def get_text_chunks(text):
32
  chunks = text_splitter.split_text(text)
33
  return chunks
34
 
 
35
  def get_vectorstore(text_chunks):
36
  embeddings = OpenAIEmbeddings()
37
  # embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-xl")
38
  vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embeddings)
39
  return vectorstore
40
 
 
41
  def get_conversation_chain(vectorstore):
42
  llm = ChatOpenAI()
43
  # llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
@@ -51,6 +50,7 @@ def get_conversation_chain(vectorstore):
51
  )
52
  return conversation_chain
53
 
 
54
  def handle_userinput(user_question):
55
  response = st.session_state.conversation({'question': user_question})
56
  st.session_state.chat_history = response['chat_history']
@@ -63,6 +63,7 @@ def handle_userinput(user_question):
63
  st.write(bot_template.replace(
64
  "{{MSG}}", message.content), unsafe_allow_html=True)
65
 
 
66
  def main():
67
  load_dotenv()
68
  st.set_page_config(page_title="Chat with multiple PDFs",
@@ -98,5 +99,7 @@ def main():
98
  st.session_state.conversation = get_conversation_chain(
99
  vectorstore)
100
 
 
101
  if __name__ == '__main__':
102
  main()
 
 
7
  from langchain.chat_models import ChatOpenAI
8
  from langchain.memory import ConversationBufferMemory
9
  from langchain.chains import ConversationalRetrievalChain
 
 
 
 
 
10
  from htmlTemplates import css, bot_template, user_template
11
+ from langchain.llms import HuggingFaceHub
12
 
13
  def get_pdf_text(pdf_docs):
14
  text = ""
 
18
  text += page.extract_text()
19
  return text
20
 
21
+
22
  def get_text_chunks(text):
23
  text_splitter = CharacterTextSplitter(
24
  separator="\n",
 
29
  chunks = text_splitter.split_text(text)
30
  return chunks
31
 
32
+
33
  def get_vectorstore(text_chunks):
34
  embeddings = OpenAIEmbeddings()
35
  # embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-xl")
36
  vectorstore = FAISS.from_texts(texts=text_chunks, embedding=embeddings)
37
  return vectorstore
38
 
39
+
40
  def get_conversation_chain(vectorstore):
41
  llm = ChatOpenAI()
42
  # llm = HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512})
 
50
  )
51
  return conversation_chain
52
 
53
+
54
  def handle_userinput(user_question):
55
  response = st.session_state.conversation({'question': user_question})
56
  st.session_state.chat_history = response['chat_history']
 
63
  st.write(bot_template.replace(
64
  "{{MSG}}", message.content), unsafe_allow_html=True)
65
 
66
+
67
  def main():
68
  load_dotenv()
69
  st.set_page_config(page_title="Chat with multiple PDFs",
 
99
  st.session_state.conversation = get_conversation_chain(
100
  vectorstore)
101
 
102
+
103
  if __name__ == '__main__':
104
  main()
105
+