laurenssam committed on
Commit
2c412e1
·
1 Parent(s): 83f3ad0

fix interface

Browse files
Files changed (1) hide show
  1. app.py +43 -43
app.py CHANGED
@@ -197,49 +197,49 @@ def http_bot(state, temperature, top_p, max_new_tokens):
197
 
198
  if len(state.messages) == state.offset + 2:
199
  # First round of conversation
200
-
201
- if "tinyllava" in model_name.lower():
202
- if "3.1b" in model_name.lower() or "phi" in model_name.lower():
203
- template_name = "phi"
204
- elif "2.0b" in model_name.lower() or "stablelm" in model_name.lower():
205
- template_name = "phi"
206
- elif "qwen" in model_name.lower():
207
- template_name = "qwen"
208
- else:
209
- template_name = "v1"
210
-
211
- elif "llava" in model_name.lower():
212
-
213
- if "llama-2" in model_name.lower():
214
- template_name = "llava_llama_2"
215
- elif "v1" in model_name.lower():
216
- if "mmtag" in model_name.lower():
217
- template_name = "v1_mmtag"
218
- elif (
219
- "plain" in model_name.lower()
220
- and "finetune" not in model_name.lower()
221
- ):
222
- template_name = "v1_mmtag"
223
- else:
224
- template_name = "llava_v1"
225
- elif "mpt" in model_name.lower():
226
- template_name = "mpt"
227
- else:
228
- if "mmtag" in model_name.lower():
229
- template_name = "v0_mmtag"
230
- elif (
231
- "plain" in model_name.lower()
232
- and "finetune" not in model_name.lower()
233
- ):
234
- template_name = "v0_mmtag"
235
- else:
236
- template_name = "llava_v0"
237
- elif "mpt" in model_name:
238
- template_name = "mpt_text"
239
- elif "llama-2" in model_name:
240
- template_name = "llama_2"
241
- else:
242
- template_name = "vicuna_v1"
243
  new_state = conv_templates[template_name].copy()
244
  new_state.append_message(new_state.roles[0], state.messages[-2][1])
245
  new_state.append_message(new_state.roles[1], None)
 
197
 
198
  if len(state.messages) == state.offset + 2:
199
  # First round of conversation
200
+ template_name = 'phi'
201
+ # if "tinyllava" in model_name.lower():
202
+ # if "3.1b" in model_name.lower() or "phi" in model_name.lower():
203
+ # template_name = "phi"
204
+ # elif "2.0b" in model_name.lower() or "stablelm" in model_name.lower():
205
+ # template_name = "phi"
206
+ # elif "qwen" in model_name.lower():
207
+ # template_name = "qwen"
208
+ # else:
209
+ # template_name = "v1"
210
+
211
+ # elif "llava" in model_name.lower():
212
+
213
+ # if "llama-2" in model_name.lower():
214
+ # template_name = "llava_llama_2"
215
+ # elif "v1" in model_name.lower():
216
+ # if "mmtag" in model_name.lower():
217
+ # template_name = "v1_mmtag"
218
+ # elif (
219
+ # "plain" in model_name.lower()
220
+ # and "finetune" not in model_name.lower()
221
+ # ):
222
+ # template_name = "v1_mmtag"
223
+ # else:
224
+ # template_name = "llava_v1"
225
+ # elif "mpt" in model_name.lower():
226
+ # template_name = "mpt"
227
+ # else:
228
+ # if "mmtag" in model_name.lower():
229
+ # template_name = "v0_mmtag"
230
+ # elif (
231
+ # "plain" in model_name.lower()
232
+ # and "finetune" not in model_name.lower()
233
+ # ):
234
+ # template_name = "v0_mmtag"
235
+ # else:
236
+ # template_name = "llava_v0"
237
+ # elif "mpt" in model_name:
238
+ # template_name = "mpt_text"
239
+ # elif "llama-2" in model_name:
240
+ # template_name = "llama_2"
241
+ # else:
242
+ # template_name = "vicuna_v1"
243
  new_state = conv_templates[template_name].copy()
244
  new_state.append_message(new_state.roles[0], state.messages[-2][1])
245
  new_state.append_message(new_state.roles[1], None)