Omnibus committed on
Commit
f320162
1 Parent(s): b77c9c6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -10
app.py CHANGED
@@ -20,16 +20,16 @@ models=[
20
  client_z=[]
21
 
22
 
23
- def load_models(inp):
24
 
25
  out_box=[gr.Chatbot(),gr.Chatbot(),gr.Chatbot(),gr.Chatbot()]
26
  print(type(inp))
27
  print(inp)
28
- print(models[inp[0]])
29
  client_z.clear()
30
  for z,ea in enumerate(inp):
31
- client_z.append(InferenceClient(models[inp[z]]))
32
- out_box[z]=(gr.update(label=models[inp[z]]))
33
  return out_box[0],out_box[1],out_box[2],out_box[3]
34
 
35
  def format_prompt_default(message, history):
@@ -66,10 +66,10 @@ def format_prompt_mixtral(message, history):
66
  prompt += f"[INST] {message} [/INST]"
67
  return prompt
68
 
69
- def format_prompt_choose(message, history, model_name):
70
- if "gemma" in models[model_name].lower() and "it" in models[model_name].lower():
71
  return format_prompt_gemma(message,history)
72
- if "mixtral" in models[model_name].lower():
73
  return format_prompt_mixtral(message,history)
74
  else:
75
  return format_prompt_default(message,history)
@@ -219,6 +219,10 @@ def chat_inf_d(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
219
  yield history
220
  else:
221
  yield None
 
 
 
 
222
  def clear_fn():
223
  return None,None,None,None,None,None
224
  rand_val=random.randint(1,1111111111111111)
@@ -229,6 +233,7 @@ def check_rand(inp,val):
229
  return gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, value=int(val))
230
 
231
  with gr.Blocks() as app:
 
232
  gr.HTML("""<center><h1 style='font-size:xx-large;'>Chatbot Model Compare</h1><br><h3>running on Huggingface Inference Client</h3><br><h7>EXPERIMENTAL""")
233
  with gr.Row():
234
  chat_a = gr.Chatbot(height=500)
@@ -248,8 +253,9 @@ with gr.Blocks() as app:
248
  with gr.Group():
249
  stop_btn=gr.Button("Stop")
250
  clear_btn=gr.Button("Clear")
251
- client_choice=gr.Dropdown(label="Models",type='index',choices=[c for c in models],max_choices=4,multiselect=True,interactive=True)
252
-
 
253
  with gr.Column(scale=1):
254
  with gr.Group():
255
  rand = gr.Checkbox(label="Random Seed", value=True)
@@ -273,7 +279,10 @@ with gr.Blocks() as app:
273
  hid1=gr.Number(value=1,visible=False)
274
  hid2=gr.Number(value=2,visible=False)
275
  hid3=gr.Number(value=3,visible=False)
276
- hid4=gr.Number(value=4,visible=False)
 
 
 
277
  client_choice.change(load_models,client_choice,[chat_a,chat_b,chat_c,chat_d])
278
 
279
  #im_go=im_btn.click(get_screenshot,[chat_b,im_height,im_width,chatblock,theme,wait_time],img)
 
20
  client_z=[]
21
 
22
 
23
def load_models(inp, new_models):
    """Build an InferenceClient per selected model and relabel the chat panes.

    Parameters
    ----------
    inp : list[int]
        Indices chosen in the "Models" dropdown (``type='index'``, multiselect,
        up to four entries).
    new_models : list[str]
        Current model-id list (the ``gr.State`` value seeded from ``models``).

    Returns
    -------
    tuple
        Four ``gr.Chatbot``/``gr.update`` objects, one per chat pane.

    Side effects: clears and repopulates the module-level ``client_z`` list.
    """
    out_box = [gr.Chatbot(), gr.Chatbot(), gr.Chatbot(), gr.Chatbot()]
    print(type(inp))
    print(inp)
    client_z.clear()
    # Guard: deselecting everything yields an empty list; indexing inp[0]
    # would raise IndexError, so leave all four panes untouched instead.
    if inp:
        print(new_models[inp[0]])
        for z, idx in enumerate(inp):
            client_z.append(InferenceClient(new_models[idx]))
            out_box[z] = gr.update(label=new_models[idx])
    return out_box[0], out_box[1], out_box[2], out_box[3]
34
 
35
  def format_prompt_default(message, history):
 
66
  prompt += f"[INST] {message} [/INST]"
67
  return prompt
68
 
69
def format_prompt_choose(message, history, model_name, new_models):
    """Dispatch to the prompt formatter matching the selected model id.

    Parameters
    ----------
    message : str
        Current user message.
    history : list
        Prior (user, bot) turns, passed through to the chosen formatter.
    model_name : int
        Index into ``new_models`` (Dropdown ``type='index'``).
    new_models : list[str]
        Current model-id list (gr.State value).
    """
    # Hoist the lookup + lower() instead of repeating it in every condition.
    name = new_models[model_name].lower()
    if "gemma" in name and "it" in name:
        return format_prompt_gemma(message, history)
    if "mixtral" in name:
        return format_prompt_mixtral(message, history)
    return format_prompt_default(message, history)
 
219
  yield history
220
  else:
221
  yield None
222
def add_new_model(inp, cur):
    """Append a user-entered model id to the model list held in gr.State.

    Parameters
    ----------
    inp : str | None
        Text from the "New Model" textbox.
    cur : list[str]
        Current model list (gr.State value); mutated in place.

    Returns
    -------
    list[str]
        The updated list, written back to the State output.
    """
    name = (inp or "").strip()
    # Ignore empty/whitespace submissions so blank, unusable dropdown
    # entries can't be created by clicking "Add Model" with no text.
    if name:
        cur.append(name)
    return cur
225
+
226
def clear_fn():
    """Reset the six chat-related components by emitting one None each."""
    return (None,) * 6
228
  rand_val=random.randint(1,1111111111111111)
 
233
  return gr.Slider(label="Seed", minimum=1, maximum=1111111111111111, value=int(val))
234
 
235
  with gr.Blocks() as app:
236
+ new_models=gr.State(models)
237
  gr.HTML("""<center><h1 style='font-size:xx-large;'>Chatbot Model Compare</h1><br><h3>running on Huggingface Inference Client</h3><br><h7>EXPERIMENTAL""")
238
  with gr.Row():
239
  chat_a = gr.Chatbot(height=500)
 
253
  with gr.Group():
254
  stop_btn=gr.Button("Stop")
255
  clear_btn=gr.Button("Clear")
256
# Dropdown choices must come from the plain `models` list: iterating the
# gr.State component (`new_models`) walks the component object, not the list
# it holds. New entries added at runtime are appended to the State value.
client_choice = gr.Dropdown(label="Models", type='index', choices=list(models), max_choices=4, multiselect=True, interactive=True)
add_model = gr.Textbox(label="New Model")
add_btn = gr.Button("Add Model")
259
  with gr.Column(scale=1):
260
  with gr.Group():
261
  rand = gr.Checkbox(label="Random Seed", value=True)
 
279
  hid1=gr.Number(value=1,visible=False)
280
  hid2=gr.Number(value=2,visible=False)
281
  hid3=gr.Number(value=3,visible=False)
282
+ hid4=gr.Number(value=4,visible=False)
283
+
284
+
285
add_btn.click(add_new_model, [add_model, new_models], new_models)
# load_models now takes (inp, new_models): the model-list State must be
# passed as a second input alongside the dropdown selection, otherwise the
# callback is invoked with one argument and raises TypeError.
client_choice.change(load_models, [client_choice, new_models], [chat_a, chat_b, chat_c, chat_d])

#im_go=im_btn.click(get_screenshot,[chat_b,im_height,im_width,chatblock,theme,wait_time],img)