taesiri committed on
Commit
11a8662
β€’
1 Parent(s): 1364bea

updated to claude-3

Browse files
Files changed (1) hide show
  1. app.py +42 -41
app.py CHANGED
@@ -6,7 +6,7 @@ import os
6
  import arxiv
7
  import gradio as gr
8
  import requests
9
- from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic
10
  from arxiv_latex_extractor import get_paper_content
11
  from huggingface_hub import HfApi
12
 
@@ -70,43 +70,54 @@ def get_paper_from_huggingface(paper_id):
70
 
71
 
72
  class ContextualQA:
73
- def __init__(self, client, model="claude-2.1"):
74
  self.client = client
75
  self.model = model
76
- self.context = ""
77
  self.questions = []
78
  self.responses = []
79
 
80
  def load_text(self, text):
81
- self.context = text
82
 
83
  def ask_question(self, question):
84
- if self.questions:
85
- # For the first question-answer pair, don't add HUMAN_PROMPT before the question
86
- first_pair = f"Question: {self.questions[0]}\n{AI_PROMPT} Answer: {self.responses[0]}"
87
- # For subsequent questions, include both HUMAN_PROMPT and AI_PROMPT
88
- subsequent_pairs = "\n".join(
89
- [
90
- f"{HUMAN_PROMPT} Question: {q}\n{AI_PROMPT} Answer: {a}"
91
- for q, a in zip(self.questions[1:], self.responses[1:])
92
- ]
93
- )
94
- history_context = f"{first_pair}\n{subsequent_pairs}"
95
- else:
96
- history_context = ""
97
-
98
- full_context = f"{self.context}\n\n{history_context}\n"
99
-
100
- prompt = f"{HUMAN_PROMPT} {full_context} {HUMAN_PROMPT} {question} {AI_PROMPT}"
101
-
102
- response = self.client.completions.create(
103
- prompt=prompt,
104
- stop_sequences=[HUMAN_PROMPT],
105
- max_tokens_to_sample=6000,
 
 
 
 
 
 
106
  model=self.model,
107
- stream=False,
 
 
 
108
  )
109
- answer = response.completion
 
 
110
  self.questions.append(question)
111
  self.responses.append(answer)
112
  return answer
@@ -186,7 +197,7 @@ def load_context(paper_id):
186
 
187
  # Initialize the Anthropic client and QA model
188
  client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
189
- qa_model = ContextualQA(client, model="claude-2.1")
190
  context = f"{LEADING_PROMPT}\n{latex_source}"
191
  qa_model.load_text(context)
192
 
@@ -235,17 +246,10 @@ with gr.Blocks(
235
  gr.HTML(
236
  """
237
  <h1 style='text-align: center; font-size: 24px;'>
238
- Explore ArXiv Papers in Depth with πŸ”₯ <code>claude-2.1</code> πŸ”₯- Ask Questions and Get Answers Instantly
239
  </h1>
240
  """
241
  )
242
- # gr.HTML(
243
- # """
244
- # <p style='text-align: justify; font-size: 18px; margin: 10px;'>
245
- # Explore the depths of ArXiv papers with our interactive app, powered by the advanced <code>claude-2.1</code> model. Ask detailed questions and get immediate, context-rich answers from academic papers.
246
- # </p>
247
- # """
248
- # )
249
 
250
  gr.HTML(
251
  """
@@ -287,7 +291,7 @@ with gr.Blocks(
287
  gr.Markdown(
288
  "## Acknowledgements\n"
289
  "This project is made possible through the generous support of "
290
- "[Anthropic](https://www.anthropic.com/), who provided free access to the `claude-2.1` API."
291
  )
292
 
293
  btn_load.click(load_context, inputs=[paper_id_input], outputs=[qa_model, chatbot])
@@ -307,6 +311,3 @@ with gr.Blocks(
307
  btn_clear.click(clear_context, outputs=[chatbot])
308
 
309
  demo.launch()
310
-
311
- # app.mount("/js", StaticFiles(directory="js"), name="js")
312
- # gr.mount_gradio_app(app, demo, path="/")
 
6
  import arxiv
7
  import gradio as gr
8
  import requests
9
+ from anthropic import Anthropic
10
  from arxiv_latex_extractor import get_paper_content
11
  from huggingface_hub import HfApi
12
 
 
70
 
71
 
72
class ContextualQA:
    """Hold a document context plus a running Q&A history and answer
    questions with Anthropic's Messages API.

    The full conversation (document, prior Q&A pairs, new question) is
    replayed on every call so the model always sees the complete history.
    """

    def __init__(self, client, model="claude-3-opus-20240229", initial_context=""):
        # client: an anthropic.Anthropic instance (or any object exposing a
        # compatible `messages.create(...)` method).
        self.client = client
        self.model = model
        self.context = initial_context  # document text the model answers about
        self.questions = []  # questions asked so far, in order
        self.responses = []  # answers received, parallel to `questions`

    def load_text(self, text):
        """Replace the current document context with `text`."""
        self.context = text

    def ask_question(self, question):
        """Ask `question` about the loaded context and return the answer text.

        Builds the alternating user/assistant transcript required by the
        Messages API, records the new Q&A pair, and returns the answer.
        """
        # Present the document once, then a canned assistant acknowledgement
        # so the transcript strictly alternates user/assistant roles.
        # (Fixed typo from the previous revision: "bleow" -> "below".)
        messages = [
            {
                "role": "user",
                "content": [
                    {
                        "type": "text",
                        "text": "Read the document below and answer the questions\n"
                        + self.context,
                    }
                ],
            },
            {
                "role": "assistant",
                "content": [
                    {
                        "type": "text",
                        "text": "The document is loaded. You can now ask questions.",
                    }
                ],
            },
        ]

        # Replay every prior question/answer pair so the model keeps history.
        for q, a in zip(self.questions, self.responses):
            messages.append(
                {"role": "user", "content": [{"type": "text", "text": q}]}
            )
            messages.append(
                {"role": "assistant", "content": [{"type": "text", "text": a}]}
            )

        # The new question goes last.
        messages.append(
            {"role": "user", "content": [{"type": "text", "text": question}]}
        )

        # Bug fix: the previous revision also passed the entire document as the
        # `system` prompt, so every request shipped the full paper TWICE.
        # The document already lives in the first user message above.
        response = self.client.messages.create(
            model=self.model,
            max_tokens=1024,
            messages=messages,
            temperature=0,  # deterministic responses
        )

        # Messages API returns a list of content blocks; take the first text block.
        answer = response.content[0].text
        self.questions.append(question)
        self.responses.append(answer)
        return answer
 
197
 
198
  # Initialize the Anthropic client and QA model
199
  client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
200
+ qa_model = ContextualQA(client, model="claude-3-opus-20240229")
201
  context = f"{LEADING_PROMPT}\n{latex_source}"
202
  qa_model.load_text(context)
203
 
 
246
  gr.HTML(
247
  """
248
  <h1 style='text-align: center; font-size: 24px;'>
249
+ Explore ArXiv Papers in Depth with πŸ”₯ <code>claude-3-opus-20240229</code> πŸ”₯- Ask Questions and Get Answers Instantly
250
  </h1>
251
  """
252
  )
 
 
 
 
 
 
 
253
 
254
  gr.HTML(
255
  """
 
291
  gr.Markdown(
292
  "## Acknowledgements\n"
293
  "This project is made possible through the generous support of "
294
+ "[Anthropic](https://www.anthropic.com/), who provided free access to the `claude-3` API."
295
  )
296
 
297
  btn_load.click(load_context, inputs=[paper_id_input], outputs=[qa_model, chatbot])
 
311
  btn_clear.click(clear_context, outputs=[chatbot])
312
 
313
  demo.launch()