Shreyas094 committed
Commit
0f075d7
1 Parent(s): d23826b

Update app.py

Files changed (1)
  1. app.py +36 -41
app.py CHANGED
@@ -215,7 +215,7 @@ def google_search(term, num_results=5, lang="en", timeout=5, safe="active", ssl_
             print(" No text extracted")
     return all_results
 
-def process_question(question, documents, history, temperature, top_p, repetition_penalty):
+def process_question(question, documents, history, temperature, top_p, repetition_penalty, enable_web_search):
     global conversation_history
 
     embeddings = get_embeddings()
@@ -223,26 +223,24 @@ def process_question(question, documents, history, temperature, top_p, repetitio
     # Check the memory database for similar questions
     for prev_question, prev_answer in memory_database.items():
         similarity = get_similarity(question, prev_question)
-        if similarity > 0.7:
+        if similarity > 0.8:
             return prev_answer
 
-    # Load the FAISS vector store if it exists
+    # Retrieve relevant documents from the vector store
     if os.path.exists("faiss_database"):
         db = FAISS.load_local("faiss_database", embeddings, allow_dangerous_deserialization=True)
         relevant_docs = db.similarity_search(question, k=3)
     else:
         relevant_docs = []
 
-    if len(relevant_docs) == 0:
-        # Perform web search and update the vector store
+    # Perform web search if enabled and no relevant documents found
+    if enable_web_search and len(relevant_docs) == 0:
         web_search_results = google_search(question, num_results=5)
         web_docs = [Document(page_content=res["text"] or "", metadata={"source": res["link"]}) for res in web_search_results if res["text"]]
 
         if web_docs:
             # Update the FAISS vector store with new documents
             create_or_update_database(web_docs, embeddings)
-
-            # Reload the updated FAISS store and retrieve relevant documents
             db = FAISS.load_local("faiss_database", embeddings, allow_dangerous_deserialization=True)
             relevant_docs = db.similarity_search(question, k=3)
 
@@ -251,7 +249,7 @@ def process_question(question, documents, history, temperature, top_p, repetitio
     if is_related_to_history(question, history):
         context = "None"
     else:
-        history_text = "\n".join([f"Q: {h['question']}\nA: {h['answer']}" for h in history])
+        history_text = "\n".join([f"Q: {h['question']}\nA: {h['answer']}" for h in history]) if history else "None"
         context = context if context else "None"
 
     prompt_text = ChatPromptTemplate(
@@ -306,43 +304,40 @@ def export_memory_db_to_excel():
     return excel_path
 
 with gr.Blocks() as demo:
-    with gr.Tab("Upload PDF"):
-        with gr.Row():
-            pdf_file = gr.File(label="Upload PDF")
-        with gr.Row():
-            recursive_check = gr.Checkbox(label="Use Recursive Text Splitter")
-            upload_button = gr.Button("Upload and Process")
-        with gr.Row():
-            upload_output = gr.Textbox(label="Upload Output")
+    with gr.Row():
+        pdf_file = gr.File(label="Upload PDF")
+    with gr.Row():
+        recursive_check = gr.Checkbox(label="Use Recursive Text Splitter")
+        upload_button = gr.Button("Upload and Process")
+    with gr.Row():
+        upload_output = gr.Textbox(label="Upload Output")
 
-    with gr.Tab("Ask Questions"):
-        with gr.Row():
-            question = gr.Textbox(label="Your Question")
-        with gr.Row():
-            temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.7, label="Temperature")
-            top_p = gr.Slider(minimum=0.0, maximum=1.0, value=0.9, label="Top P")
-            repetition_penalty = gr.Slider(minimum=0.0, maximum=2.0, value=1.0, label="Repetition Penalty")
-        with gr.Row():
-            ask_button = gr.Button("Ask")
-        with gr.Row():
-            answer = gr.Textbox(label="Answer")
+    with gr.Row():
+        question = gr.Textbox(label="Your Question")
+    with gr.Row():
+        temperature = gr.Slider(minimum=0.0, maximum=1.0, value=0.7, label="Temperature")
+        top_p = gr.Slider(minimum=0.0, maximum=1.0, value=0.9, label="Top P")
+        repetition_penalty = gr.Slider(minimum=0.0, maximum=2.0, value=1.0, label="Repetition Penalty")
+        web_search_check = gr.Checkbox(label="Enable Web Search")
+    with gr.Row():
+        ask_button = gr.Button("Ask")
+    with gr.Row():
+        answer = gr.Textbox(label="Answer")
 
-    with gr.Tab("Clear Cache"):
-        with gr.Row():
-            clear_button = gr.Button("Clear Cache")
-        with gr.Row():
-            clear_output = gr.Textbox(label="Clear Output")
-
-    with gr.Tab("Export Data"):
-        with gr.Row():
-            export_db_button = gr.Button("Export Database to Excel")
-            export_db_output = gr.Textbox(label="Export Output")
-        with gr.Row():
-            export_memory_button = gr.Button("Export Memory DB to Excel")
-            export_memory_output = gr.Textbox(label="Export Output")
+    with gr.Row():
+        clear_button = gr.Button("Clear Cache")
+    with gr.Row():
+        clear_output = gr.Textbox(label="Clear Output")
+
+    with gr.Row():
+        export_db_button = gr.Button("Export Database to Excel")
+        export_db_output = gr.Textbox(label="Export Output")
+    with gr.Row():
+        export_memory_button = gr.Button("Export Memory DB to Excel")
+        export_memory_output = gr.Textbox(label="Export Output")
 
     upload_button.click(process_uploaded_file, [pdf_file, recursive_check], upload_output)
-    ask_button.click(process_question, [question, pdf_file, recursive_check, temperature, top_p, repetition_penalty], answer)
+    ask_button.click(process_question, [question, pdf_file, conversation_history, temperature, top_p, repetition_penalty, web_search_check], answer)
     clear_button.click(clear_cache, [], clear_output)
    export_db_button.click(extract_db_to_excel, [], export_db_output)
    export_memory_button.click(export_memory_db_to_excel, [], export_memory_output)
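The core behavioral change in this commit is the retrieval path of process_question: the local FAISS store is always consulted first, and the Google-search fallback now runs only when the new enable_web_search flag is set and nothing relevant was found locally. The sketch below isolates that path; it is a minimal illustration, not the full function. google_search and create_or_update_database are the helpers defined elsewhere in app.py (only referenced here), and the import paths assume a recent langchain_community / langchain_core layout.

import os
from langchain_community.vectorstores import FAISS
from langchain_core.documents import Document

def retrieve_context(question, embeddings, enable_web_search, k=3):
    """Sketch of the retrieval flow process_question settles on after this commit."""
    relevant_docs = []

    # Always try the local FAISS store first, if it exists on disk.
    if os.path.exists("faiss_database"):
        db = FAISS.load_local("faiss_database", embeddings,
                              allow_dangerous_deserialization=True)
        relevant_docs = db.similarity_search(question, k=k)

    # Fall back to a web search only when the user opted in and nothing was found.
    if enable_web_search and len(relevant_docs) == 0:
        results = google_search(question, num_results=5)  # helper defined in app.py, not here
        web_docs = [
            Document(page_content=res["text"] or "", metadata={"source": res["link"]})
            for res in results if res["text"]
        ]
        if web_docs:
            # Persist the scraped pages, then re-query the refreshed store.
            create_or_update_database(web_docs, embeddings)  # helper defined in app.py, not here
            db = FAISS.load_local("faiss_database", embeddings,
                                  allow_dangerous_deserialization=True)
            relevant_docs = db.similarity_search(question, k=k)

    return relevant_docs

In the app itself, the same decision is driven by the new "Enable Web Search" checkbox, which the ask_button.click wiring passes through as the enable_web_search argument.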
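The other tuning is the memory-cache threshold: a previously stored answer is reused only when the new question's similarity to a cached question exceeds 0.8 (up from 0.7), making the cache stricter about near-duplicates. The self-contained toy below illustrates that short-circuit only; app.py's actual get_similarity is not shown in this diff, so the string-ratio comparison here is a placeholder, not the app's real similarity measure.

from difflib import SequenceMatcher

# Toy cache of previously answered questions (illustrative data only).
memory_database = {
    "What is FAISS?": "FAISS is a library for efficient similarity search over dense vectors.",
}

def get_similarity_placeholder(a, b):
    # Placeholder: app.py's get_similarity (not in this diff) may well be embedding-based.
    return SequenceMatcher(None, a.lower(), b.lower()).ratio()

def cached_answer(question, threshold=0.8):
    # Reuse a stored answer only if some previous question is sufficiently similar.
    for prev_question, prev_answer in memory_database.items():
        if get_similarity_placeholder(question, prev_question) > threshold:
            return prev_answer
    return None  # caller falls through to retrieval / generation

print(cached_answer("What is FAISS?"))       # hit: identical question
print(cached_answer("Explain vector DBs"))   # miss: below the 0.8 threshold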