Noobian committed on
Commit fa07fe6
1 Parent(s): af1977c

Create app.py

Files changed (1)
  1. app.py +50 -0
app.py ADDED
@@ -0,0 +1,50 @@
+ import gradio as gr
+ import PyPDF2
+ from langchain.embeddings.openai import OpenAIEmbeddings
+ from langchain.vectorstores.faiss import FAISS
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain import OpenAI, VectorDBQA
+
+ import os
+
+ # The OpenAI API key is expected to be available in the environment
+ # (e.g. as a Hugging Face Space secret named OPENAI_API_KEY).
+ openai_api_key = os.getenv("OPENAI_API_KEY", "")
+ os.environ["OPENAI_API_KEY"] = openai_api_key
+
+
+ def pdf_to_text(pdf_file, query):
+     # Open the uploaded PDF file in binary mode
+     with open(pdf_file.name, 'rb') as f:
+         # Create a PDF reader object
+         pdf_reader = PyPDF2.PdfReader(f)
+
+         # Create an empty string to store the text
+         text = ""
+
+         # Loop through each page of the PDF and add its text to the text variable
+         for page in pdf_reader.pages:
+             text += page.extract_text()
+
+     # Embedding step: split the extracted text into chunks
+     text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
+     texts = text_splitter.split_text(text)
+
+     embeddings = OpenAIEmbeddings()
+     # Vector store
+     vectorstore = FAISS.from_texts(texts, embeddings)
+
+     # Inference
+     qa = VectorDBQA.from_chain_type(llm=OpenAI(), chain_type="stuff", vectorstore=vectorstore)
+     return qa.run(query)
+
+
+
+ # Define the Gradio interface
+ pdf_input = gr.inputs.File(label="PDF File")
+ query_input = gr.inputs.Textbox(label="Query")
+ outputs = gr.outputs.Textbox(label="Chatbot Response")
+ interface = gr.Interface(fn=pdf_to_text, inputs=[pdf_input, query_input], outputs=outputs)
+
+ # Run the interface
+ interface.launch()