HuanjinYao committed
Commit
f34e2ac
1 Parent(s): ec4c4f7

Update app.py

Files changed (1)
  1. app.py +9 -2
app.py CHANGED
@@ -88,14 +88,21 @@ def bot_streaming(message, history):
     image = Image.open(image).convert('RGB')
     image_tensor = process_images([image], image_processor, model.config)[0]

+
+
     inputs = tokenizer_image_token(prompt, tokenizer, IMAGE_TOKEN_INDEX, return_tensors='pt').unsqueeze(0)

-    print('inputs', inputs)
+
+    image_tensor = image_tensor.to(dtype=torch.float16, device='cuda', non_blocking=True)
+    input_ids.to(device='cuda', non_blocking=True)
+
+    print('image', image_tensor.shape)
+    print('inputs', inputs.shape)


     streamer = TextIteratorStreamer(tokenizer, timeout=10.0, skip_prompt=True, skip_special_tokens=True)

-    generation_kwargs = dict(input_ids=inputs, streamer=streamer, max_new_tokens=1024, do_sample=False, eos_token_id = terminators)
+    generation_kwargs = dict(input_ids=inputs, images=image_tensor, streamer=streamer, max_new_tokens=1024, do_sample=False, eos_token_id = terminators)

     print('here?')

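
For readers following the change, below is a minimal sketch of how the edited streaming path typically fits together. It assumes `model`, `tokenizer`, `image_processor`, `prompt`, `terminators`, `IMAGE_TOKEN_INDEX`, and the LLaVA-style helpers `process_images` / `tokenizer_image_token` are defined elsewhere in app.py; the function name `stream_reply` is illustrative (in app.py this logic lives inside `bot_streaming`). Note that `Tensor.to()` is not in-place, so the sketch reassigns the moved token ids (the committed diff calls `input_ids.to(...)` without assignment), and generation runs on a background thread so the `TextIteratorStreamer` can be iterated as text arrives.

from threading import Thread

import torch
from PIL import Image
from transformers import TextIteratorStreamer

def stream_reply(image_path):
    # Preprocess the image with the model's image processor (LLaVA-style helper).
    image = Image.open(image_path).convert('RGB')
    image_tensor = process_images([image], image_processor, model.config)[0]

    # Tokenize the prompt, keeping the placeholder slot for the image token.
    inputs = tokenizer_image_token(prompt, tokenizer, IMAGE_TOKEN_INDEX,
                                   return_tensors='pt').unsqueeze(0)

    # Tensor.to() returns a new tensor, so reassign after moving to the GPU.
    image_tensor = image_tensor.to(dtype=torch.float16, device='cuda', non_blocking=True)
    inputs = inputs.to(device='cuda', non_blocking=True)

    # Stream decoded text instead of waiting for the full generation to finish.
    streamer = TextIteratorStreamer(tokenizer, timeout=10.0,
                                    skip_prompt=True, skip_special_tokens=True)
    generation_kwargs = dict(input_ids=inputs, images=image_tensor, streamer=streamer,
                             max_new_tokens=1024, do_sample=False,
                             eos_token_id=terminators)

    # generate() blocks, so run it in a background thread and consume the streamer.
    Thread(target=model.generate, kwargs=generation_kwargs).start()

    buffer = ''
    for new_text in streamer:
        buffer += new_text
        yield buffer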