lisaf committed on
Commit
ae0cbef
1 Parent(s): 5cb0831

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -90
app.py DELETED
@@ -1,90 +0,0 @@
1
- import streamlit as st
2
- from PyPDF2 import PdfReader
3
- from langchain.text_splitter import RecursiveCharacterTextSplitter
4
- import os
5
- from langchain_google_genai import GoogleGenerativeAIEmbeddings
6
- import google.generativeai as genai
7
- from langchain.vectorstores import FAISS
8
- from langchain_google_genai import ChatGoogleGenerativeAI
9
- from langchain.chains.question_answering import load_qa_chain
10
- from langchain.prompts import PromptTemplate
11
- from dotenv import load_dotenv
12
-
13
# Load environment variables from a local .env file and configure the
# Gemini SDK once at import time.
load_dotenv()

# Bind the key and pass it explicitly — the original had a bare
# os.getenv("GOOGLE_API_KEY") statement whose result was discarded.
api_key = os.getenv("GOOGLE_API_KEY")
genai.configure(api_key=api_key)
18
def get_pdf_text(pdf_docs):
    """Concatenate the extracted text of every page of every uploaded PDF.

    Args:
        pdf_docs: Iterable of file-like objects readable by PyPDF2.PdfReader.

    Returns:
        One string containing all page text, in upload/page order.
    """
    pages = []
    for pdf in pdf_docs:
        pdf_reader = PdfReader(pdf)
        for page in pdf_reader.pages:
            # extract_text() returns None for image-only/empty pages; the
            # original `text += page.extract_text()` raised TypeError there.
            pages.append(page.extract_text() or "")
    # join() avoids the quadratic cost of repeated string concatenation.
    return "".join(pages)
25
-
26
-
27
def get_text_chunks(text):
    """Split *text* into overlapping chunks suitable for embedding.

    Chunks are 10 000 characters with a 1 000-character overlap so that
    context is preserved across chunk boundaries.
    """
    splitter = RecursiveCharacterTextSplitter(
        chunk_size=10000,
        chunk_overlap=1000,
    )
    return splitter.split_text(text)
31
-
32
-
33
def get_vector_store(text_chunks):
    """Embed *text_chunks* with Gemini embeddings and persist a FAISS index.

    The index is saved to the local "faiss_index" directory; later queries
    reload it from that same path.
    """
    embedder = GoogleGenerativeAIEmbeddings(model="models/embedding-001")
    index = FAISS.from_texts(text_chunks, embedding=embedder)
    index.save_local("faiss_index")
37
-
38
-
39
def get_conversational_chain():
    """Build a "stuff"-type question-answering chain on the gemini-pro model.

    Returns:
        A loaded QA chain that expects the keys "input_documents" and
        "question" when invoked.
    """
    prompt_template = """
    Answer the question as detailed as possible from the provided context, make sure to provide all the details, if the answer is not in
    provided context just say, "answer is not available in the context", don't provide the wrong answer\n\n
    Context:\n {context}?\n
    Question: \n{question}\n

    Answer:
    """

    llm = ChatGoogleGenerativeAI(model="gemini-pro", temperature=0.3)
    qa_prompt = PromptTemplate(
        template=prompt_template,
        input_variables=["context", "question"],
    )
    return load_qa_chain(llm, chain_type="stuff", prompt=qa_prompt)
53
-
54
-
55
def user_input(user_question):
    """Answer *user_question* from the saved FAISS index and render the reply.

    Loads the locally persisted "faiss_index", retrieves the chunks most
    similar to the question, runs them through the conversational chain,
    and writes the model's answer to the Streamlit page.
    """
    embedder = GoogleGenerativeAIEmbeddings(model="models/embedding-001")

    # NOTE(security): allow_dangerous_deserialization unpickles the index
    # from disk; acceptable only because this app wrote "faiss_index" itself.
    db = FAISS.load_local("faiss_index", embedder, allow_dangerous_deserialization=True)
    docs = db.similarity_search(user_question)

    chain = get_conversational_chain()
    response = chain(
        {"input_documents": docs, "question": user_question},
        return_only_outputs=True,
    )

    st.write("Reply: ", response["output_text"])
67
-
68
-
69
def main():
    """Streamlit entry point: a question box plus a PDF upload/index sidebar."""
    st.set_page_config("Chat PDF")
    st.header("Chat with PDF using Gemini💁")

    user_question = st.text_input("Ask a Question from the PDF Files")

    if user_question:
        user_input(user_question)

    with st.sidebar:
        st.title("Menu:")
        pdf_docs = st.file_uploader("Upload your PDF Files and Click on the Submit & Process Button", accept_multiple_files=True)
        if st.button("Submit & Process"):
            if not pdf_docs:
                # Guard: processing with no files yields an empty chunk list,
                # which the original passed straight into FAISS and crashed.
                st.warning("Please upload at least one PDF file first.")
            else:
                with st.spinner("Processing..."):
                    raw_text = get_pdf_text(pdf_docs)
                    text_chunks = get_text_chunks(raw_text)
                    get_vector_store(text_chunks)
                    st.success("Done")
87
-
88
-
89
# Run the app only when executed as a script, not when imported.
if __name__ == "__main__":
    main()