Manojkumarpandi committed on
Commit
37206b3
·
verified ·
1 Parent(s): 91f6f96

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -68
app.py CHANGED
@@ -1,140 +1,107 @@
1
  import streamlit as st
2
  import google.generativeai as genai
3
  from langchain.document_loaders import PyPDFDirectoryLoader
 
4
  import os
5
- import shutil
6
 
7
- # Configuration
8
- GOOGLE_API_KEY = st.secrets["GOOGLE_API_KEY"]
 
 
9
 
10
  # Page configuration
11
  st.set_page_config(page_title="Chat with PDFs", page_icon="πŸ“š")
12
 
13
  def initialize_session_state():
14
- """Initialize session state variables"""
15
- session_state_vars = {
16
  "messages": [],
17
  "loaded_files": False,
18
  "pdf_content": None,
19
  "chat": None
20
  }
21
-
22
- for var, value in session_state_vars.items():
23
- if var not in st.session_state:
24
- st.session_state[var] = value
25
 
26
- def load_pdfs(pdf_folder):
27
- """Load PDFs and return their content"""
28
- if not os.path.exists(pdf_folder):
29
- os.makedirs(pdf_folder)
30
-
31
- loader = PyPDFDirectoryLoader(pdf_folder)
32
  documents = loader.load()
33
-
34
- # Concatenate all documents content
35
- content = "\n\n".join([doc.page_content for doc in documents])
36
- return content
37
 
38
  def initialize_chat(pdf_content):
39
- """Initialize Gemini chat with PDF content"""
40
  genai.configure(api_key=GOOGLE_API_KEY)
41
-
42
  generation_config = {
43
  "temperature": 0.7,
44
  "top_p": 0.95,
45
  "top_k": 40,
46
  "max_output_tokens": 8192,
47
  }
48
-
49
  model = genai.GenerativeModel(
50
  model_name="gemini-1.5-pro",
51
  generation_config=generation_config,
52
  )
53
-
54
- # Start chat with context
55
- context_prompt = f"""You are a helpful assistant that answers questions based on the following document content:
56
- {pdf_content}
57
- Please use this content to answer user questions. If the answer cannot be found in the content, say so."""
58
 
 
 
 
 
59
  chat = model.start_chat(history=[])
60
- # Send initial context
61
  chat.send_message(context_prompt)
62
  return chat
63
 
64
  def main():
65
  initialize_session_state()
66
-
67
  st.title("πŸ’¬ Chat with PDFs")
68
-
69
- # Sidebar for PDF upload
70
  with st.sidebar:
71
- st.header("Upload Documents")
72
- uploaded_files = st.file_uploader(
73
- "Upload your PDFs",
74
- type=["pdf"],
75
- accept_multiple_files=True
76
- )
77
 
78
  if uploaded_files and not st.session_state.loaded_files:
79
- # Create pdfs directory if it doesn't exist
80
  if not os.path.exists("pdfs"):
81
  os.makedirs("pdfs")
82
-
83
- # Clean up old PDF files
84
  for file in os.listdir("pdfs"):
85
  os.remove(os.path.join("pdfs", file))
86
-
87
- # Save uploaded files
88
  for file in uploaded_files:
89
  with open(f"pdfs/{file.name}", "wb") as f:
90
  f.write(file.getvalue())
91
-
92
- # Load PDF content
93
  with st.spinner("Processing PDFs..."):
94
  try:
95
- pdf_content = load_pdfs("pdfs")
96
- st.session_state.pdf_content = pdf_content
97
  st.session_state.loaded_files = True
98
-
99
- # Initialize chat with content
100
- st.session_state.chat = initialize_chat(pdf_content)
101
  except Exception as e:
102
- st.error(f"Error processing PDFs: {str(e)}")
103
  return
104
 
105
- # Main chat interface
106
  if st.session_state.loaded_files:
107
- # Display chat messages
108
- for message in st.session_state.messages:
109
- with st.chat_message(message["role"]):
110
- st.markdown(message["content"])
111
 
112
- # Chat input
113
  if prompt := st.chat_input("Ask a question about your PDFs:"):
114
- # Add user message to chat history
115
  st.session_state.messages.append({"role": "user", "content": prompt})
116
  with st.chat_message("user"):
117
  st.markdown(prompt)
118
 
119
  with st.chat_message("assistant"):
120
- response_placeholder = st.empty()
121
  try:
122
- # Get response from Gemini
123
  if not st.session_state.chat:
124
  st.session_state.chat = initialize_chat(st.session_state.pdf_content)
125
-
126
  response = st.session_state.chat.send_message(prompt)
127
  response_text = response.text
128
-
129
- response_placeholder.markdown(response_text)
130
-
131
- # Add assistant response to chat history
132
  st.session_state.messages.append({"role": "assistant", "content": response_text})
133
  except Exception as e:
134
- response_placeholder.error(f"Error generating response: {str(e)}")
135
-
136
  else:
137
- st.info("Please upload PDFs to start chatting.")
138
 
139
  if __name__ == "__main__":
140
- main()
 
1
  import streamlit as st
2
  import google.generativeai as genai
3
  from langchain.document_loaders import PyPDFDirectoryLoader
4
+ from dotenv import load_dotenv
5
  import os
 
6
 
7
# Load API keys from .env into the process environment.
load_dotenv()
# NOTE(review): os.getenv returns None when the key is missing — genai.configure
# is only called later, so a missing key surfaces downstream; confirm the .env
# (or platform secret) is present in deployment.
GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
# HF_API_KEY = os.getenv("HF_API_KEY") # Not used directly here, but loaded if needed
11
 
12
# Page configuration: browser-tab title and icon for the app.
st.set_page_config(page_title="Chat with PDFs", page_icon="📚")
14
 
15
def initialize_session_state():
    """Seed ``st.session_state`` with every key the app relies on.

    Keys that already exist are left untouched, so values survive
    Streamlit reruns.
    """
    defaults = {
        "messages": [],         # chat transcript: {"role", "content"} dicts
        "loaded_files": False,  # flips True once uploaded PDFs are processed
        "pdf_content": None,    # concatenated text of all loaded PDFs
        "chat": None,           # active Gemini chat session
    }
    for name, initial in defaults.items():
        st.session_state.setdefault(name, initial)
 
25
 
26
def load_pdfs(folder_path):
    """Load every PDF under *folder_path* and return their combined text.

    Creates the folder if it does not exist, so a first run against an
    empty directory returns ``""`` instead of raising.

    Args:
        folder_path: Directory scanned by ``PyPDFDirectoryLoader``.

    Returns:
        The ``page_content`` of every loaded page, joined by blank lines.
    """
    # exist_ok=True avoids the check-then-create race of the previous
    # os.path.exists() + os.makedirs() pair.
    os.makedirs(folder_path, exist_ok=True)
    loader = PyPDFDirectoryLoader(folder_path)
    documents = loader.load()
    return "\n\n".join(doc.page_content for doc in documents)
 
 
 
32
 
33
def initialize_chat(pdf_content):
    """Create a Gemini chat session primed with the PDF text.

    Configures the API key, builds a ``gemini-1.5-pro`` model, and sends
    the document content as the opening message so that subsequent
    questions are answered against it.

    Args:
        pdf_content: Concatenated text of the loaded PDFs.

    Returns:
        The started chat session, already holding the context message.
    """
    genai.configure(api_key=GOOGLE_API_KEY)

    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro",
        generation_config={
            "temperature": 0.7,
            "top_p": 0.95,
            "top_k": 40,
            "max_output_tokens": 8192,
        },
    )

    context_prompt = f"""You are a helpful assistant. Use the following PDF content to answer user questions:
{pdf_content}
If the answer isn't in the content, reply accordingly."""

    session = model.start_chat(history=[])
    # Prime the session with the documents before any user turn.
    session.send_message(context_prompt)
    return session
54
 
55
def main():
    """Streamlit entry point: upload PDFs in the sidebar, then chat about them.

    Flow per rerun: ensure session keys exist, (re)collect uploads, process
    them once (guarded by the ``loaded_files`` flag), then render the chat
    transcript and handle one new user turn.
    """
    initialize_session_state()
    st.title("💬 Chat with PDFs")

    # NOTE(review): the diff rendering loses indentation — the upload
    # processing below is assumed to sit inside the sidebar context;
    # confirm against the deployed file.
    with st.sidebar:
        st.header("Upload PDFs")
        uploaded_files = st.file_uploader("Upload PDF files", type=["pdf"], accept_multiple_files=True)

        # Process uploads exactly once; loaded_files blocks re-processing
        # on every subsequent Streamlit rerun.
        if uploaded_files and not st.session_state.loaded_files:
            if not os.path.exists("pdfs"):
                os.makedirs("pdfs")
            # Clear out files from any previous upload batch.
            for file in os.listdir("pdfs"):
                os.remove(os.path.join("pdfs", file))
            # Persist the new uploads so PyPDFDirectoryLoader can read them.
            for file in uploaded_files:
                with open(f"pdfs/{file.name}", "wb") as f:
                    f.write(file.getvalue())

            with st.spinner("Processing PDFs..."):
                try:
                    pdf_text = load_pdfs("pdfs")
                    st.session_state.pdf_content = pdf_text
                    st.session_state.loaded_files = True
                    # Prime a Gemini chat with the extracted text.
                    st.session_state.chat = initialize_chat(pdf_text)
                except Exception as e:
                    st.error(f"Error loading PDFs: {str(e)}")
                    return

    if st.session_state.loaded_files:
        # Replay the stored transcript (Streamlit reruns the whole script
        # on every interaction, so history must be redrawn each time).
        for msg in st.session_state.messages:
            with st.chat_message(msg["role"]):
                st.markdown(msg["content"])

        if prompt := st.chat_input("Ask a question about your PDFs:"):
            st.session_state.messages.append({"role": "user", "content": prompt})
            with st.chat_message("user"):
                st.markdown(prompt)

            with st.chat_message("assistant"):
                # Placeholder lets us show either the answer or an error
                # in the same slot.
                placeholder = st.empty()
                try:
                    # Lazily rebuild the chat if it was lost (e.g. after a
                    # session refresh) while pdf_content survived.
                    if not st.session_state.chat:
                        st.session_state.chat = initialize_chat(st.session_state.pdf_content)
                    response = st.session_state.chat.send_message(prompt)
                    response_text = response.text
                    placeholder.markdown(response_text)
                    st.session_state.messages.append({"role": "assistant", "content": response_text})
                except Exception as e:
                    placeholder.error(f"Error generating response: {str(e)}")
    else:
        st.info("Please upload PDFs to begin.")
105
 
106
# Entry point when executed as a script (e.g. `streamlit run app.py`).
if __name__ == "__main__":
    main()