Fix db error
app.py CHANGED
@@ -214,6 +214,15 @@ def main():
     # db = FAISS.from_documents(sp_docs, embeddings)
     # # db.save_local(DB_FAISS_UPLOAD_PATH)
     # # st.write(f"Your model is already store in {DB_FAISS_UPLOAD_PATH}")
+
+    llm = load_llama2_llamaCpp()
+    qa_prompt = set_custom_prompt()
+    memory = ConversationBufferWindowMemory(k = 0, return_messages=True, input_key= 'question', output_key='answer', memory_key="chat_history")
+    #memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
+    doc_chain = load_qa_chain(llm, chain_type="stuff", prompt = qa_prompt)
+    question_generator = LLMChain(llm=llm, prompt=CONDENSE_QUESTION_PROMPT)
+
+
     uploaded_file = st.file_uploader('Choose your .pdf file', type="pdf")
     print(uploaded_file)
     if uploaded_file is not None:
@@ -223,14 +232,6 @@ def main():
         for page in pdf_reader.pages:
             text += page.extract_text()
         db = FAISS.from_texts(text, embeddings)
-
-        llm = load_llama2_llamaCpp()
-        qa_prompt = set_custom_prompt()
-        memory = ConversationBufferWindowMemory(k = 0, return_messages=True, input_key= 'question', output_key='answer', memory_key="chat_history")
-        #memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
-        doc_chain = load_qa_chain(llm, chain_type="stuff", prompt = qa_prompt)
-        question_generator = LLMChain(llm=llm, prompt=CONDENSE_QUESTION_PROMPT)
-        if db is not None :
         qa_chain = ConversationalRetrievalChain(
             retriever =db.as_retriever(search_type="similarity_score_threshold", search_kwargs={'k':3, "score_threshold": 0.7}),
             question_generator=question_generator,
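
For context, a minimal sketch (not taken from app.py) of how the pieces this commit moves around are typically wired with the legacy LangChain API. Here llm, qa_prompt, and embeddings stand in for whatever load_llama2_llamaCpp(), set_custom_prompt(), and the app's embedding setup return; the diff is cut off after question_generator, so combine_docs_chain and memory are assumed constructor arguments, and the list-wrapping of the page text is an assumption as well.

# Sketch only -- assumes the legacy (pre-0.1) langchain API used elsewhere in app.py.
from langchain.chains import ConversationalRetrievalChain, LLMChain
from langchain.chains.question_answering import load_qa_chain
from langchain.chains.conversational_retrieval.prompts import CONDENSE_QUESTION_PROMPT
from langchain.memory import ConversationBufferWindowMemory
from langchain.vectorstores import FAISS

def build_qa_chain(llm, qa_prompt, embeddings, page_text):
    # FAISS.from_texts expects a list of strings, so the extracted PDF text
    # is wrapped in a list here (the diffed code passes the raw string).
    db = FAISS.from_texts([page_text], embeddings)

    # Condenses the chat history plus a follow-up into a standalone question.
    question_generator = LLMChain(llm=llm, prompt=CONDENSE_QUESTION_PROMPT)

    # "Stuff" chain that answers from the retrieved chunks with the custom prompt.
    doc_chain = load_qa_chain(llm, chain_type="stuff", prompt=qa_prompt)

    # k=0 keeps no previous turns in the rolling window.
    memory = ConversationBufferWindowMemory(
        k=0,
        return_messages=True,
        input_key="question",
        output_key="answer",
        memory_key="chat_history",
    )

    return ConversationalRetrievalChain(
        retriever=db.as_retriever(
            search_type="similarity_score_threshold",
            search_kwargs={"k": 3, "score_threshold": 0.7},
        ),
        question_generator=question_generator,
        combine_docs_chain=doc_chain,  # assumed: how doc_chain is normally attached
        memory=memory,                 # assumed: not visible in the truncated diff
    )

In the committed change itself this setup now runs before the file uploader, so the LLM, prompts, and chains exist regardless of whether a PDF has been uploaded yet, and the removed "if db is not None" guard is no longer needed at that point.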