Yan committed on
Commit
877e510
1 Parent(s): 8268f49

Trying out a longer token count to see if we can get more verbose answers

Browse files
Files changed (1) hide show
  1. handler.py +1 -1
handler.py CHANGED
@@ -207,7 +207,7 @@ class EndpointHandler():
207
  img_features=None,
208
  img_attn_mask=inputs["img_attn_mask"][:, :-1].to("cuda"),
209
  use_cache=True,
210
- max_new_tokens=128,
211
  )
212
  generated_text = self.processor.batch_decode(generated_ids, skip_special_tokens=True)[0]
213
 
 
207
  img_features=None,
208
  img_attn_mask=inputs["img_attn_mask"][:, :-1].to("cuda"),
209
  use_cache=True,
210
+ max_new_tokens=512,
211
  )
212
  generated_text = self.processor.batch_decode(generated_ids, skip_special_tokens=True)[0]
213