vilarin committed on
Commit
90b9de8
1 Parent(s): a927087

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -4
app.py CHANGED
@@ -49,7 +49,8 @@ eos_token_id=processor.tokenizer.eos_token_id
49
 
50
  @spaces.GPU(queue=False)
51
  def stream_chat(message, history: list, temperature: float, max_new_tokens: int):
52
- print(message)
 
53
  conversation = []
54
  for prompt, answer in history:
55
  conversation.extend([{"role": "user", "content": prompt}, {"role": "assistant", "content": answer}])
@@ -60,10 +61,11 @@ def stream_chat(message, history: list, temperature: float, max_new_tokens: int)
60
  else:
61
  if len(history) == 0:
62
  raise gr.Error("Please upload an image first.")
63
- image = None
 
 
64
  conversation.append({"role": "user", "content": message['text']})
65
- print(conversation)
66
-
67
  inputs = processor.tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
68
  inputs_ids = processor(inputs, image, return_tensors="pt").to(0)
69
  streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": True, "skip_prompt": True, 'clean_up_tokenization_spaces':False,})
 
49
 
50
  @spaces.GPU(queue=False)
51
  def stream_chat(message, history: list, temperature: float, max_new_tokens: int):
52
+ print(f'message is - {message}')
53
+ print(f'history is - {history}')
54
  conversation = []
55
  for prompt, answer in history:
56
  conversation.extend([{"role": "user", "content": prompt}, {"role": "assistant", "content": answer}])
 
61
  else:
62
  if len(history) == 0:
63
  raise gr.Error("Please upload an image first.")
64
+ image = None
65
+ elif len(history):
66
+ image = history
67
  conversation.append({"role": "user", "content": message['text']})
68
+ print(f"Conversation is -\n{conversation}")
 
69
  inputs = processor.tokenizer.apply_chat_template(conversation, tokenize=False, add_generation_prompt=True)
70
  inputs_ids = processor(inputs, image, return_tensors="pt").to(0)
71
  streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": True, "skip_prompt": True, 'clean_up_tokenization_spaces':False,})