wholewhale committed
Commit: b13b769
Parent: d0d7875

gradio change

Files changed (1):
  1. app.py (+23 -54)

app.py CHANGED
@@ -4,10 +4,10 @@ from langchain.document_loaders import OnlinePDFLoader
 from langchain.text_splitter import CharacterTextSplitter
 from langchain.chat_models import ChatAnthropic
 from langchain.prompts import ChatPromptTemplate
-from langchain.document_loaders import TextLoader
+from transformers import pipeline
 
-# Set API keys from environment variables
-os.environ['WORKING_ANTHROPIC_API_KEY'] = os.getenv("ANTHROPIC_API_KEY")
+# Fetch API key from environment variables
+ANTHROPIC_API_KEY = os.getenv("ANTHROPIC_API_KEY")
 
 pdf_content = ""
 
@@ -20,8 +20,6 @@ def load_pdf(pdf_doc):
         # Load PDF content
         loader = OnlinePDFLoader(pdf_doc.name)
         documents = loader.load()
-
-        # Assuming the `documents` is a list of strings representing each page
         pdf_content = ' '.join(documents)
 
         return "PDF Loaded Successfully."
@@ -30,71 +28,42 @@ def load_pdf(pdf_doc):
         return f"Error processing PDF: {e}"
 
 def chat_with_pdf(question):
-    # Create an instance of the ChatAnthropic model
     model = ChatAnthropic()
-
-    # Define the chat prompt template
     prompt = ChatPromptTemplate.from_messages([
         ("human", pdf_content),
         ("human", question),
-        ("human", "Give a clear summary of this pdf information at a 8th grade reading level.")
+        ("human", "Give a clear summary of this pdf information at an 8th grade reading level.")
    ])
-
-    # Invoke the model using the chain
     chain = prompt | model
     response = chain.invoke({})
-
-    # Get the summary of the PDF content
     summarizer = pipeline("summarization")
     summary = summarizer(pdf_content, max_length=1000, min_length=30, do_sample=False)[0]['summary_text']
-
-    # Combine the chat response and the summary
     combined_response = f"Summary: {summary}\n\nChat Response: {response.content}"
-
     return combined_response
 
-# Define Gradio UI
-def gradio_interface(pdf_doc, question):
-    # ...
-    return gr.Interface(
-        fn=chat_with_pdf,
-        inputs=[pdf_doc, question],
-        outputs=gr.outputs.Textbox(),
-        api_name='chat_with_pdf_2'
-    )
-
 def gradio_interface(pdf_doc, question):
     if not pdf_content:
         return load_pdf(pdf_doc)
     else:
-        # Get the summary of the PDF content
         summarizer = pipeline("summarization")
         summary = summarizer(pdf_content, max_length=100, min_length=30, do_sample=False)[0]['summary_text']
-
-        # Get the chat response
         response = chat_with_pdf(question)
-
-        # Define the outputs
-        summary_output = gr.outputs.Textbox(label="Summary")
-        chat_output = gr.outputs.Textbox(label="Chat Response")
-
-        # Return the Gradio interface with the Multi output
-        return gr.Interface(
-            fn=chat_with_pdf,
-            inputs=[pdf_doc, question],
-            outputs=gradio.outputs.Multi(summary_output, chat_output),
-            examples=[["sample.pdf", "What is this document about?"]],
-            api_name='chat_with_pdf_2'
-        )
-
-gradio_interface(None, None)
-
-
-gr.Interface(fn=gradio_interface,
-             inputs=[gr.File(label="Load a pdf", file_types=['.pdf'], type="file"),
-                     gr.Textbox(label="Ask a question about the PDF")],
-             outputs="text",
-             live=True,
-             title="Chat with PDF content using Anthropic",
-             description="Upload a .PDF and interactively chat about its content."
-            ).launch()
+        return {
+            "Summary": summary,
+            "Chat Response": response
+        }
+
+gr.Interface(
+    fn=gradio_interface,
+    inputs=[
+        gr.File(label="Load a pdf", file_types=['.pdf'], type="file"),
+        gr.Textbox(label="Ask a question about the PDF")
+    ],
+    outputs=[
+        gr.outputs.Textbox(label="Summary"),
+        gr.outputs.Textbox(label="Chat Response")
+    ],
+    live=True,
+    title="Chat with PDF content using Anthropic",
+    description="Upload a .PDF and interactively chat about its content."
+).launch()
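
For reference on the multi-output wiring introduced here: gr.Interface hands one return value to each output component, in order, so a handler that feeds two Textbox components usually returns a pair of strings rather than a dict keyed by label. The sketch below is not part of the commit; answer_pdf and its placeholder values are hypothetical stand-ins for the load_pdf/chat_with_pdf logic above, and it assumes a Gradio release where output components are created directly as gr.Textbox.

import gradio as gr

# Hypothetical stand-in for the load_pdf / chat_with_pdf logic in app.py
def answer_pdf(pdf_doc, question):
    summary = "placeholder summary of the uploaded PDF"
    chat_response = f"placeholder answer to: {question}"
    # One return value per output component, in the same order as `outputs`
    return summary, chat_response

demo = gr.Interface(
    fn=answer_pdf,
    inputs=[
        gr.File(label="Load a pdf", file_types=[".pdf"]),
        gr.Textbox(label="Ask a question about the PDF"),
    ],
    outputs=[
        gr.Textbox(label="Summary"),
        gr.Textbox(label="Chat Response"),
    ],
    title="Chat with PDF content using Anthropic",
)

if __name__ == "__main__":
    demo.launch()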