ndurner committed on
Commit
06d8a03
·
1 Parent(s): 4ca4a77

port to Claude 3

Browse files
Files changed (3) hide show
  1. README.md +2 -2
  2. app.py +29 -31
  3. requirements.txt +1 -1
README.md CHANGED
@@ -1,5 +1,5 @@
1
  ---
2
- title: OAI Chat
3
  emoji: 📈
4
  colorFrom: yellow
5
  colorTo: gray
@@ -10,4 +10,4 @@ pinned: false
10
  license: mit
11
  ---
12
 
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
  ---
2
+ title: Claude Chat
3
  emoji: 📈
4
  colorFrom: yellow
5
  colorTo: gray
 
10
  license: mit
11
  ---
12
 
13
+ Chat interface based on the Anthropic Claude foundation models.
app.py CHANGED
@@ -1,7 +1,7 @@
1
  import gradio as gr
2
  import base64
3
  import os
4
- from openai import OpenAI
5
  import json
6
 
7
  from doc2json import process_docx
@@ -20,7 +20,7 @@ def encode_image(image_data):
20
  image_data: The image data, encoded in base64.
21
 
22
  Returns:
23
- A string containing the prefix.
24
  """
25
 
26
  # Get the first few bytes of the image data.
@@ -43,7 +43,9 @@ def encode_image(image_data):
43
  # Unknown image type.
44
  raise Exception("Unknown image type")
45
 
46
- return f"data:image/{image_type};base64,{base64.b64encode(image_data).decode('utf-8')}"
 
 
47
 
48
  def add_text(history, text):
49
  history = history + [(text, None)]
@@ -100,34 +102,28 @@ def save_settings(acc, sec, prompt, temp, tokens, model):
100
  def process_values_js():
101
  return """
102
  () => {
103
- return ["oai_key", "system_prompt", "seed"];
104
  }
105
  """
106
 
107
- def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens, model):
108
  try:
109
- client = OpenAI(
110
- api_key=oai_key
111
  )
112
 
113
- seed_i = None
114
- if seed:
115
- seed_i = int(seed)
116
-
117
  if log_to_console:
118
  print(f"bot history: {str(history)}")
119
 
120
  history_openai_format = []
121
  user_msg_parts = []
122
- if system_prompt:
123
- history_openai_format.append({"role": "system", "content": system_prompt})
124
  for human, assi in history:
125
  if human is not None:
126
  if human.startswith(image_embed_prefix):
127
  with open(human.lstrip(image_embed_prefix), mode="rb") as f:
128
  content = f.read()
129
- user_msg_parts.append({"type": "image_url",
130
- "image_url":{"url": encode_image(content)}})
131
  else:
132
  user_msg_parts.append({"type": "text", "text": human})
133
 
@@ -147,18 +143,22 @@ def bot(message, history, oai_key, system_prompt, seed, temperature, max_tokens,
147
  if log_to_console:
148
  print(f"br_prompt: {str(history_openai_format)}")
149
 
150
- response = client.chat.completions.create(
151
  model=model,
152
  messages= history_openai_format,
153
  temperature=temperature,
154
- seed=seed_i,
155
- max_tokens=max_tokens
156
  )
157
 
158
  if log_to_console:
159
  print(f"br_response: {str(response)}")
160
 
161
- history[-1][1] = response.choices[0].message.content
 
 
 
 
162
  if log_to_console:
163
  print(f"br_result: {str(history)}")
164
 
@@ -182,14 +182,13 @@ def import_history(history, file):
182
  return history
183
 
184
  with gr.Blocks() as demo:
185
- gr.Markdown("# OAI Chat (Nils' Version™️)")
186
 
187
  with gr.Accordion("Settings"):
188
- oai_key = gr.Textbox(label="OpenAI API Key", elem_id="oai_key")
189
- model = gr.Dropdown(label="Model", value="gpt-4-turbo-preview", allow_custom_value=True, elem_id="model",
190
- choices=["gpt-4-turbo-preview", "gpt-4-1106-preview", "gpt-4", "gpt-4-vision-preview", "gpt-3.5-turbo", "gpt-3.5-turbo-16k", "gpt-3.5-turbo-1106"])
191
  system_prompt = gr.TextArea("You are a helpful AI.", label="System Prompt", lines=3, max_lines=250, elem_id="system_prompt")
192
- seed = gr.Textbox(label="Seed", elem_id="seed")
193
  temp = gr.Slider(0, 1, label="Temperature", elem_id="temp", value=1)
194
  max_tokens = gr.Slider(1, 4000, label="Max. Tokens", elem_id="max_tokens", value=800)
195
  save_button = gr.Button("Save Settings")
@@ -197,7 +196,7 @@ with gr.Blocks() as demo:
197
 
198
  load_button.click(load_settings, js="""
199
  () => {
200
- let elems = ['#oai_key textarea', '#system_prompt textarea', '#seed textarea', '#temp input', '#max_tokens input', '#model'];
201
  elems.forEach(elem => {
202
  let item = document.querySelector(elem);
203
  let event = new InputEvent('input', { bubbles: true });
@@ -207,11 +206,10 @@ with gr.Blocks() as demo:
207
  }
208
  """)
209
 
210
- save_button.click(save_settings, [oai_key, system_prompt, seed, temp, max_tokens, model], js="""
211
- (oai, sys, seed, temp, ntok, model) => {
212
- localStorage.setItem('oai_key', oai);
213
  localStorage.setItem('system_prompt', sys);
214
- localStorage.setItem('seed', seed);
215
  localStorage.setItem('temp', document.querySelector('#temp input').value);
216
  localStorage.setItem('max_tokens', document.querySelector('#max_tokens input').value);
217
  localStorage.setItem('model', model);
@@ -243,7 +241,7 @@ with gr.Blocks() as demo:
243
  )
244
  submit_btn = gr.Button("🚀 Send", scale=0)
245
  submit_click = submit_btn.click(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
246
- bot, [txt, chatbot, oai_key, system_prompt, seed, temp, max_tokens, model], [txt, chatbot],
247
  )
248
  submit_click.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
249
 
@@ -254,7 +252,7 @@ with gr.Blocks() as demo:
254
  dmp_btn.click(dump, inputs=[chatbot], outputs=[txt_dmp])
255
 
256
  txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
257
- bot, [txt, chatbot, oai_key, system_prompt, seed, temp, max_tokens, model], [txt, chatbot],
258
  )
259
  txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
260
  file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False, postprocess=False)
 
1
  import gradio as gr
2
  import base64
3
  import os
4
+ from anthropic import Anthropic
5
  import json
6
 
7
  from doc2json import process_docx
 
20
  image_data: The image data, encoded in base64.
21
 
22
  Returns:
23
+ An object encoding the image
24
  """
25
 
26
  # Get the first few bytes of the image data.
 
43
  # Unknown image type.
44
  raise Exception("Unknown image type")
45
 
46
+ return {"type": "base64",
47
+ "media_type": "image/" + image_type,
48
+ "data": base64.b64encode(image_data).decode('utf-8')}
49
 
50
  def add_text(history, text):
51
  history = history + [(text, None)]
 
102
  def process_values_js():
103
  return """
104
  () => {
105
+ return ["api_key", "system_prompt"];
106
  }
107
  """
108
 
109
+ def bot(message, history, api_key, system_prompt, temperature, max_tokens, model):
110
  try:
111
+ client = Anthropic(
112
+ api_key=api_key
113
  )
114
 
 
 
 
 
115
  if log_to_console:
116
  print(f"bot history: {str(history)}")
117
 
118
  history_openai_format = []
119
  user_msg_parts = []
 
 
120
  for human, assi in history:
121
  if human is not None:
122
  if human.startswith(image_embed_prefix):
123
  with open(human.lstrip(image_embed_prefix), mode="rb") as f:
124
  content = f.read()
125
+ user_msg_parts.append({"type": "image",
126
+ "source": encode_image(content)})
127
  else:
128
  user_msg_parts.append({"type": "text", "text": human})
129
 
 
143
  if log_to_console:
144
  print(f"br_prompt: {str(history_openai_format)}")
145
 
146
+ response = client.messages.create(
147
  model=model,
148
  messages= history_openai_format,
149
  temperature=temperature,
150
+ max_tokens=max_tokens,
151
+ system=system_prompt
152
  )
153
 
154
  if log_to_console:
155
  print(f"br_response: {str(response)}")
156
 
157
+ resp = ""
158
+ for content in response.content:
159
+ resp += content.text
160
+
161
+ history[-1][1] = resp
162
  if log_to_console:
163
  print(f"br_result: {str(history)}")
164
 
 
182
  return history
183
 
184
  with gr.Blocks() as demo:
185
+ gr.Markdown("# Anthropic™️ Claude™️ Chat (Nils' Version™️)")
186
 
187
  with gr.Accordion("Settings"):
188
+ api_key = gr.Textbox(label="Anthropic API Key", elem_id="api_key")
189
+ model = gr.Dropdown(label="Model", value="claude-3-opus-20240229", allow_custom_value=True, elem_id="model",
190
+ choices=["claude-3-opus-20240229", "claude-3-sonnet-20240229", "claude-3-haiku-20240307"])
191
  system_prompt = gr.TextArea("You are a helpful AI.", label="System Prompt", lines=3, max_lines=250, elem_id="system_prompt")
 
192
  temp = gr.Slider(0, 1, label="Temperature", elem_id="temp", value=1)
193
  max_tokens = gr.Slider(1, 4000, label="Max. Tokens", elem_id="max_tokens", value=800)
194
  save_button = gr.Button("Save Settings")
 
196
 
197
  load_button.click(load_settings, js="""
198
  () => {
199
+ let elems = ['#api_key textarea', '#system_prompt textarea', '#temp input', '#max_tokens input', '#model'];
200
  elems.forEach(elem => {
201
  let item = document.querySelector(elem);
202
  let event = new InputEvent('input', { bubbles: true });
 
206
  }
207
  """)
208
 
209
+ save_button.click(save_settings, [api_key, system_prompt, temp, max_tokens, model], js="""
210
+ (oai, sys, temp, ntok, model) => {
211
+ localStorage.setItem('api_key', oai);
212
  localStorage.setItem('system_prompt', sys);
 
213
  localStorage.setItem('temp', document.querySelector('#temp input').value);
214
  localStorage.setItem('max_tokens', document.querySelector('#max_tokens input').value);
215
  localStorage.setItem('model', model);
 
241
  )
242
  submit_btn = gr.Button("🚀 Send", scale=0)
243
  submit_click = submit_btn.click(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
244
+ bot, [txt, chatbot, api_key, system_prompt, temp, max_tokens, model], [txt, chatbot],
245
  )
246
  submit_click.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
247
 
 
252
  dmp_btn.click(dump, inputs=[chatbot], outputs=[txt_dmp])
253
 
254
  txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
255
+ bot, [txt, chatbot, api_key, system_prompt, temp, max_tokens, model], [txt, chatbot],
256
  )
257
  txt_msg.then(lambda: gr.Textbox(interactive=True), None, [txt], queue=False)
258
  file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False, postprocess=False)
requirements.txt CHANGED
@@ -1,3 +1,3 @@
1
  gradio
2
- openai >= 1.0.0
3
  lxml
 
1
  gradio
2
+ anthropic
3
  lxml