taesiri committed on
Commit
e6b26e0
1 Parent(s): 0ff2e20

upgrade to claude 2.0

Browse files
Files changed (2) hide show
  1. app.py +25 -24
  2. requirements.txt +2 -1
app.py CHANGED
@@ -3,7 +3,7 @@ import os
3
  import re
4
  import tarfile
5
 
6
- import anthropic
7
  import gradio as gr
8
  import requests
9
 
@@ -83,7 +83,7 @@ def download_arxiv_source(paper_id):
83
 
84
 
85
  class ContextualQA:
86
- def __init__(self, client, model="claude-v1.3-100k"):
87
  self.client = client
88
  self.model = model
89
  self.context = ""
@@ -94,20 +94,21 @@ class ContextualQA:
94
  self.context = text
95
 
96
  def ask_question(self, question):
97
- leading_prompt = "Here is the content of a paper:"
98
- trailing_prompt = "Now, answer the following question below. You can optionally use Markdown to format your answer."
99
- prompt = f"{anthropic.HUMAN_PROMPT} {leading_prompt}\n\n{self.context}\n\n{trailing_prompt}\n\n{anthropic.HUMAN_PROMPT} {question}\n\n{anthropic.AI_PROMPT}"
100
- response = self.client.completion_stream(
 
101
  prompt=prompt,
102
- stop_sequences=[anthropic.HUMAN_PROMPT],
103
  max_tokens_to_sample=6000,
104
  model=self.model,
105
  stream=False,
106
  )
107
- responses = [data for data in response]
108
  self.questions.append(question)
109
- self.responses.append(responses)
110
- return responses
111
 
112
  def clear_context(self):
113
  self.context = ""
@@ -130,9 +131,9 @@ def load_context(paper_id):
130
  except Exception as e:
131
  return None, [(f"Error loading paper with id {paper_id}.", str(e))]
132
 
133
- client = anthropic.Client(api_key=os.environ["ANTHROPIC_API_KEY"])
134
- model = ContextualQA(client, model="claude-v1.3-100k")
135
- model.load_text(latex_source)
136
 
137
  # Usage
138
  title, abstract = get_paper_info(paper_id)
@@ -141,7 +142,7 @@ def load_context(paper_id):
141
  abstract = replace_texttt(abstract)
142
 
143
  return (
144
- model,
145
  [
146
  (
147
  f"Load the paper with id {paper_id}.",
@@ -151,23 +152,23 @@ def load_context(paper_id):
151
  )
152
 
153
 
154
- def answer_fn(model, question, chat_history):
155
  # if question is empty, tell user that they need to ask a question
156
  if question == "":
157
  chat_history.append(("No Question Asked", "Please ask a question."))
158
- return model, chat_history, ""
159
 
160
- client = anthropic.Client(api_key=os.environ["ANTHROPIC_API_KEY"])
161
- model.client = client
162
 
163
  try:
164
- response = model.ask_question(question)
165
  except Exception as e:
166
  chat_history.append(("Error Asking Question", str(e)))
167
- return model, chat_history, ""
168
 
169
- chat_history.append((question, response[0]["completion"]))
170
- return model, chat_history, ""
171
 
172
 
173
  def clear_context():
@@ -176,10 +177,10 @@ def clear_context():
176
 
177
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
178
  gr.Markdown(
179
- "# Explore ArXiv Papers in Depth with `claude-v1.3-100k` - Ask Questions and Receive Detailed Answers Instantly"
180
  )
181
  gr.Markdown(
182
- "Dive into the world of academic papers with our dynamic app, powered by the cutting-edge `claude-v1.3-100k` model. This app allows you to ask detailed questions about any ArXiv paper and receive direct answers from the paper's content. Utilizing a context length of 100k tokens, it provides an efficient and comprehensive exploration of complex research studies, making knowledge acquisition simpler and more interactive. (This text is generated by GPT-4 )"
183
  )
184
 
185
  gr.HTML(
 
3
  import re
4
  import tarfile
5
 
6
+ from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic
7
  import gradio as gr
8
  import requests
9
 
 
83
 
84
 
85
  class ContextualQA:
86
+ def __init__(self, client, model="claude-2.0"):
87
  self.client = client
88
  self.model = model
89
  self.context = ""
 
94
  self.context = text
95
 
96
  def ask_question(self, question):
97
+ leading_prompt = "Give the following paper:"
98
+ trailing_prompt = "Now, answer the following question based on the content of the paper above. You can optionally use Markdown to format your answer or LaTeX typesetting to improve the presentation of your answer."
99
+
100
+ prompt = f"{HUMAN_PROMPT} {leading_prompt} {self.context} {trailing_prompt} {HUMAN_PROMPT} {question} {AI_PROMPT}"
101
+ response = self.client.completions.create(
102
  prompt=prompt,
103
+ stop_sequences=[HUMAN_PROMPT],
104
  max_tokens_to_sample=6000,
105
  model=self.model,
106
  stream=False,
107
  )
108
+ answer = response.completion
109
  self.questions.append(question)
110
+ self.responses.append(answer)
111
+ return answer
112
 
113
  def clear_context(self):
114
  self.context = ""
 
131
  except Exception as e:
132
  return None, [(f"Error loading paper with id {paper_id}.", str(e))]
133
 
134
+ client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
135
+ qa_model = ContextualQA(client, model="claude-2.0")
136
+ qa_model.load_text(latex_source)
137
 
138
  # Usage
139
  title, abstract = get_paper_info(paper_id)
 
142
  abstract = replace_texttt(abstract)
143
 
144
  return (
145
+ qa_model,
146
  [
147
  (
148
  f"Load the paper with id {paper_id}.",
 
152
  )
153
 
154
 
155
+ def answer_fn(qa_model, question, chat_history):
156
  # if question is empty, tell user that they need to ask a question
157
  if question == "":
158
  chat_history.append(("No Question Asked", "Please ask a question."))
159
+ return qa_model, chat_history, ""
160
 
161
+ client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])
162
+ qa_model.client = client
163
 
164
  try:
165
+ answer = qa_model.ask_question(question)
166
  except Exception as e:
167
  chat_history.append(("Error Asking Question", str(e)))
168
+ return qa_model, chat_history, ""
169
 
170
+ chat_history.append((question, answer))
171
+ return qa_model, chat_history, ""
172
 
173
 
174
  def clear_context():
 
177
 
178
  with gr.Blocks(theme=gr.themes.Soft()) as demo:
179
  gr.Markdown(
180
+ "# Explore ArXiv Papers in Depth with `claude-2.0` - Ask Questions and Receive Detailed Answers Instantly"
181
  )
182
  gr.Markdown(
183
+ "Dive into the world of academic papers with our dynamic app, powered by the cutting-edge `claude-2.0` model. This app allows you to ask detailed questions about any ArXiv paper and receive direct answers from the paper's content. Utilizing a context length of 100k tokens, it provides an efficient and comprehensive exploration of complex research studies, making knowledge acquisition simpler and more interactive. (This text is generated by GPT-4 )"
184
  )
185
 
186
  gr.HTML(
requirements.txt CHANGED
@@ -5,4 +5,5 @@ pandas
5
  seaborn
6
  tqdm
7
  numpy
8
- arxiv
 
 
5
  seaborn
6
  tqdm
7
  numpy
8
+ arxiv
9
+ tiktoken