Shreyas094 committed on
Commit
0c730b1
1 Parent(s): 6e76606

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -1
app.py CHANGED
@@ -1,6 +1,7 @@
1
  import os
2
  import json
3
  import gradio as gr
 
4
  from tempfile import NamedTemporaryFile
5
 
6
  from langchain_core.prompts import ChatPromptTemplate
@@ -31,6 +32,8 @@ prompt = """
31
  Answer the question based only on the following context:
32
  {context}
33
  Question: {question}
 
 
34
  """
35
 
36
  def get_model():
@@ -47,7 +50,7 @@ def generate_chunked_response(model, prompt, max_tokens=500, max_chunks=5):
47
  full_response += chunk
48
  if chunk.strip().endswith((".", "!", "?")):
49
  break
50
- return full_response
51
 
52
  def response(database, model, question):
53
  prompt_val = ChatPromptTemplate.from_template(prompt)
@@ -77,6 +80,19 @@ def ask_question(question):
77
  model = get_model()
78
  return response(database, model, question)
79
 
 
 
 
 
 
 
 
 
 
 
 
 
 
80
  with gr.Blocks() as demo:
81
  gr.Markdown("# Chat with your PDF documents")
82
 
@@ -93,6 +109,10 @@ with gr.Blocks() as demo:
93
 
94
  answer_output = gr.Textbox(label="Answer")
95
  submit_button.click(ask_question, inputs=[question_input], outputs=answer_output)
 
 
 
 
96
 
97
  if __name__ == "__main__":
98
  demo.launch()
 
1
  import os
2
  import json
3
  import gradio as gr
4
+ import pandas as pd
5
  from tempfile import NamedTemporaryFile
6
 
7
  from langchain_core.prompts import ChatPromptTemplate
 
32
  Answer the question based only on the following context:
33
  {context}
34
  Question: {question}
35
+
36
+ Provide a concise and direct answer to the question:
37
  """
38
 
39
  def get_model():
 
50
  full_response += chunk
51
  if chunk.strip().endswith((".", "!", "?")):
52
  break
53
+ return full_response.strip()
54
 
55
  def response(database, model, question):
56
  prompt_val = ChatPromptTemplate.from_template(prompt)
 
80
  model = get_model()
81
  return response(database, model, question)
82
 
83
+ def extract_db_to_excel():
84
+ embed = get_embeddings()
85
+ database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
86
+
87
+ documents = database.docstore._dict.values()
88
+ data = [{"page_content": doc.page_content, "metadata": json.dumps(doc.metadata)} for doc in documents]
89
+ df = pd.DataFrame(data)
90
+
91
+ excel_path = "database_output.xlsx"
92
+ df.to_excel(excel_path, index=False)
93
+
94
+ return f"Database extracted to {excel_path}"
95
+
96
  with gr.Blocks() as demo:
97
  gr.Markdown("# Chat with your PDF documents")
98
 
 
109
 
110
  answer_output = gr.Textbox(label="Answer")
111
  submit_button.click(ask_question, inputs=[question_input], outputs=answer_output)
112
+
113
+ extract_button = gr.Button("Extract Database to Excel")
114
+ extract_output = gr.Textbox(label="Extraction Status")
115
+ extract_button.click(extract_db_to_excel, inputs=[], outputs=extract_output)
116
 
117
  if __name__ == "__main__":
118
  demo.launch()