finalf0 committed on
Commit
0686a72
1 Parent(s): a4b23f2

Ban text chat

Browse files
Files changed (1) hide show
  1. app.py +10 -1
app.py CHANGED
@@ -44,7 +44,6 @@ if 'int4' in model_path:
44
  if device == 'mps':
45
  print('Error: running int4 model with bitsandbytes on Mac is not supported right now.')
46
  exit()
47
- #model = AutoModel.from_pretrained(model_path, trust_remote_code=True, attn_implementation='sdpa')
48
  model = AutoModel.from_pretrained(model_path, trust_remote_code=True)
49
  else:
50
  if True: #args.multi_gpus:
@@ -282,6 +281,9 @@ def respond(_question, _chat_bot, _app_cfg, params_form):
282
  if files_cnts[1] + videos_cnt > 1 or (files_cnts[1] + videos_cnt == 1 and files_cnts[0] + images_cnt > 0):
283
  gr.Warning("Only supports single video file input right now!")
284
  return _question, _chat_bot, _app_cfg
 
 
 
285
 
286
  if params_form == 'Beam Search':
287
  params = {
@@ -349,9 +351,11 @@ def fewshot_add_demonstration(_image, _user_message, _assistant_message, _chat_b
349
  def fewshot_respond(_image, _user_message, _chat_bot, _app_cfg, params_form):
350
  user_message_contents = []
351
  _context = _app_cfg["ctx"].copy()
 
352
  if _image:
353
  image = Image.open(_image).convert("RGB")
354
  user_message_contents += [encode_image(image)]
 
355
  if _user_message:
356
  user_message_contents += [make_text(_user_message)]
357
  if user_message_contents:
@@ -373,6 +377,10 @@ def fewshot_respond(_image, _user_message, _chat_bot, _app_cfg, params_form):
373
  'repetition_penalty': 1.05,
374
  "max_new_tokens": 2048
375
  }
 
 
 
 
376
 
377
  code, _answer, _, sts = chat("", _context, None, params)
378
 
@@ -391,6 +399,7 @@ def fewshot_respond(_image, _user_message, _chat_bot, _app_cfg, params_form):
391
  if code == 0:
392
  _app_cfg['ctx']=_context
393
  _app_cfg['sts']=sts
 
394
  return None, '', '', _chat_bot, _app_cfg
395
 
396
 
 
44
  if device == 'mps':
45
  print('Error: running int4 model with bitsandbytes on Mac is not supported right now.')
46
  exit()
 
47
  model = AutoModel.from_pretrained(model_path, trust_remote_code=True)
48
  else:
49
  if True: #args.multi_gpus:
 
281
  if files_cnts[1] + videos_cnt > 1 or (files_cnts[1] + videos_cnt == 1 and files_cnts[0] + images_cnt > 0):
282
  gr.Warning("Only supports single video file input right now!")
283
  return _question, _chat_bot, _app_cfg
284
+ if files_cnts[1] + videos_cnt + files_cnts[0] + images_cnt <= 0:
285
+ gr.Warning("Please chat with at least one image or video.")
286
+ return _question, _chat_bot, _app_cfg
287
 
288
  if params_form == 'Beam Search':
289
  params = {
 
351
  def fewshot_respond(_image, _user_message, _chat_bot, _app_cfg, params_form):
352
  user_message_contents = []
353
  _context = _app_cfg["ctx"].copy()
354
+ images_cnt = _app_cfg["images_cnt"]
355
  if _image:
356
  image = Image.open(_image).convert("RGB")
357
  user_message_contents += [encode_image(image)]
358
+ images_cnt += 1
359
  if _user_message:
360
  user_message_contents += [make_text(_user_message)]
361
  if user_message_contents:
 
377
  'repetition_penalty': 1.05,
378
  "max_new_tokens": 2048
379
  }
380
+
381
+ if images_cnt == 0:
382
+ gr.Warning("Please chat with at least one image or video.")
383
+ return _image, _user_message, '', _chat_bot, _app_cfg
384
 
385
  code, _answer, _, sts = chat("", _context, None, params)
386
 
 
399
  if code == 0:
400
  _app_cfg['ctx']=_context
401
  _app_cfg['sts']=sts
402
+ _app_cfg['images_cnt'] = images_cnt
403
  return None, '', '', _chat_bot, _app_cfg
404
 
405