KingNish committed
Commit 8d7d352
1 Parent(s): 4694189

Update app.py

Files changed (1): app.py (+5, -5)
app.py CHANGED
@@ -27,14 +27,14 @@ model.to("cpu")
 
 
 def llava(message, history):
-    if message.files:
-        image = message.files[0].path
+    if message["files"]:
+        image = message["files"][0]
     else:
         for hist in history:
             if type(hist[0])==tuple:
                 image = hist[0][0]
 
-    txt = message.text
+    txt = message["text"]
 
     gr.Info("Analyzing image")
     image = Image.open(image).convert("RGB")
@@ -93,7 +93,7 @@ def respond(message, history):
 
     user_prompt = message
     # Handle image processing
-    if message.files:
+    if message["files"]:
         inputs = llava(message, history)
         streamer = TextIteratorStreamer(processor, skip_prompt=True, **{"skip_special_tokens": True})
         generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=1024)
@@ -117,7 +117,7 @@ def respond(message, history):
         func_caller.append({"role": "user", "content": f"{str(msg[0])}"})
         func_caller.append({"role": "assistant", "content": f"{str(msg[1])}"})
 
-    message_text = message.text
+    message_text = message["text"]
     func_caller.append({"role": "user", "content": f'[SYSTEM]You are a helpful assistant. You have access to the following functions: \n {str(functions_metadata)}\n\nTo use these functions respond with:\n<functioncall> {{ "name": "function_name", "arguments": {{ "arg_1": "value_1", "arg_1": "value_1", ... }} }} </functioncall> [USER] {message_text}'})
 
     response = client_gemma.chat_completion(func_caller, max_tokens=200)
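
For context: this change appears to adapt the handlers to Gradio's multimodal ChatInterface, which passes the user turn to the callback as a plain dict with "text" and "files" keys rather than an object with .text/.files attributes, and where each entry in "files" is already a local path string. A minimal sketch of that calling convention follows (assumptions: a recent Gradio 4.x release with gr.ChatInterface(multimodal=True); the echo-style respond below is only illustrative and is not the app's real handler):

import gradio as gr

def respond(message, history):
    # message arrives as {"text": "<user text>", "files": ["<local file path>", ...]}
    if message["files"]:
        image_path = message["files"][0]  # first uploaded file, a plain path string
        yield f'Got image {image_path} with prompt: {message["text"]}'
    else:
        yield f'Text-only prompt: {message["text"]}'

demo = gr.ChatInterface(respond, multimodal=True)

if __name__ == "__main__":
    demo.launch()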