liltom-eth committed
Commit 11a6467
Parent: dc6df17

Upload code/inference.py with huggingface_hub

Files changed (1)
  1. code/inference.py +19 -11
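The commit message indicates the file was pushed with huggingface_hub rather than through a local git checkout. A minimal sketch of such an upload is below; the repo id is a placeholder (the actual repository is not shown on this page) and authentication is assumed to be configured already:

# Sketch only: repo_id is a placeholder, not taken from this commit page.
from huggingface_hub import HfApi

api = HfApi()  # assumes a token is available, e.g. via `huggingface-cli login`
api.upload_file(
    path_or_fileobj="code/inference.py",   # local file to push
    path_in_repo="code/inference.py",      # destination path inside the repo
    repo_id="liltom-eth/<repo-name>",      # placeholder repo id
    commit_message="Upload code/inference.py with huggingface_hub",
)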
code/inference.py CHANGED
@@ -45,17 +45,25 @@ def predict_fn(data, model_and_tokenizer):
     temperature = data.pop("temperature", 0.2)
     conv_mode = data.pop("conv_mode", "llava_v1")
 
-    # conv_mode = "llava_v1"
-    conv = conv_templates[conv_mode].copy()
-    roles = conv.roles
-    inp = f"{roles[0]}: {raw_prompt}"
-    inp = (
-        DEFAULT_IM_START_TOKEN + DEFAULT_IMAGE_TOKEN + DEFAULT_IM_END_TOKEN + "\n" + inp
-    )
-    conv.append_message(conv.roles[0], inp)
-    conv.append_message(conv.roles[1], None)
-    prompt = conv.get_prompt()
-    stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2
+    if conv_mode == "raw":
+        # use raw_prompt as prompt
+        prompt = raw_prompt
+        stop_str = "###"
+    else:
+        conv = conv_templates[conv_mode].copy()
+        roles = conv.roles
+        inp = f"{roles[0]}: {raw_prompt}"
+        inp = (
+            DEFAULT_IM_START_TOKEN
+            + DEFAULT_IMAGE_TOKEN
+            + DEFAULT_IM_END_TOKEN
+            + "\n"
+            + inp
+        )
+        conv.append_message(conv.roles[0], inp)
+        conv.append_message(conv.roles[1], None)
+        prompt = conv.get_prompt()
+        stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2
 
     if image_file.startswith("http") or image_file.startswith("https"):
         response = requests.get(image_file)
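In short, the hunk replaces the unconditional template-based prompt construction with a branch on conv_mode: "raw" passes raw_prompt through verbatim and stops generation at "###", while any other value still builds the prompt from the selected LLaVA conversation template. The sketch below mirrors that branch in isolation so the two outputs can be compared; it assumes the llava package (whose conv_templates, SeparatorStyle, and image-token constants inference.py already uses) is importable, and build_prompt plus the sample prompt are illustrative only, not part of the commit.

# Illustrative only: mirrors the new conv_mode branch outside of predict_fn.
from llava.constants import (
    DEFAULT_IMAGE_TOKEN,
    DEFAULT_IM_START_TOKEN,
    DEFAULT_IM_END_TOKEN,
)
from llava.conversation import conv_templates, SeparatorStyle


def build_prompt(raw_prompt, conv_mode="llava_v1"):
    if conv_mode == "raw":
        # new path: the caller's prompt is used verbatim, generation stops at "###"
        return raw_prompt, "###"
    # original path: wrap the prompt in the selected conversation template
    conv = conv_templates[conv_mode].copy()
    inp = f"{conv.roles[0]}: {raw_prompt}"
    inp = DEFAULT_IM_START_TOKEN + DEFAULT_IMAGE_TOKEN + DEFAULT_IM_END_TOKEN + "\n" + inp
    conv.append_message(conv.roles[0], inp)
    conv.append_message(conv.roles[1], None)
    prompt = conv.get_prompt()
    stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2
    return prompt, stop_str


print(build_prompt("Describe the image.", conv_mode="raw"))
print(build_prompt("Describe the image.", conv_mode="llava_v1"))

Note that the raw branch does not insert the image placeholder tokens, so a caller using conv_mode="raw" presumably has to include any required tokens in raw_prompt itself.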