johann22 committed
Commit 1d0cf9b
1 Parent(s): 94e12b2

Update app.py: thread a user-settable generation seed (0 = randomize) through run_gpt, run_idefics, and the run handlers, and add a seed slider to the chat UI.

Files changed (1): app.py
app.py CHANGED
@@ -32,6 +32,8 @@ history = []
-def gen_from_infer(purpose,history,image,model_drop,chat_drop,choice):
+def gen_from_infer(purpose,history,image,model_drop,chat_drop,choice,seed=0):
     #out_img = infer(out_prompt)
     history.clear()
-    out_prompt=generate(purpose,history,chat_drop)
+    if seed == 0:
+        seed = random.randint(1,1111111111111111)
+    out_prompt=generate(purpose,history,chat_drop,seed)
     history.append((purpose,out_prompt))
     yield (history,None)
@@ -60,11 +62,12 @@ def format_prompt(message, history):
     prompt += f"[INST] {message} [/INST]"
     return prompt
 
-def run_gpt(in_prompt,history,model_drop):
+def run_gpt(in_prompt,history,model_drop,seed=0):
     client = InferenceClient(c_models[int(model_drop)])
     print(f'history :: {history}')
     prompt=format_prompt(in_prompt,history)
-    seed = random.randint(1,1111111111111111)
+    if seed == 0:
+        seed = random.randint(1,1111111111111111)
     print (seed)
     generate_kwargs = dict(
         temperature=1.0,
@@ -82,7 +85,7 @@ def run_gpt(in_prompt,history,model_drop):
         resp += response.token.text
     return resp
 
-def run_idefics(in_prompt,history,model_drop):
+def run_idefics(in_prompt,history,model_drop,seed):
     client = InferenceClient("HuggingFaceM4/idefics-9b-instruct")
     print(f'history :: {history}')
     prompt=format_prompt(in_prompt,history)
@@ -106,21 +109,21 @@ def run_idefics(in_prompt,history,model_drop):
     return resp
 
 
-def generate(purpose,history,chat_drop):
+def generate(purpose,history,chat_drop,seed):
     print (history)
-    out_prompt = run_gpt(purpose,history,chat_drop)
+    out_prompt = run_gpt(purpose,history,chat_drop,seed)
     return out_prompt
 
-def describe(purpose,history,image,chat_drop):
+def describe(purpose,history,image,chat_drop,seed):
     print (history)
     purpose=f"{purpose},![]({image})"
-    out_prompt = run_idefics(purpose,history,chat_drop)
+    out_prompt = run_idefics(purpose,history,chat_drop,seed)
     return out_prompt
 
-def run(purpose,history,image,model_drop,chat_drop,choice):
+def run(purpose,history,image,model_drop,chat_drop,choice,seed):
     if choice == "Generate":
         #out_img = infer(out_prompt)
-        out_prompt=generate(purpose,history,chat_drop)
+        out_prompt=generate(purpose,history,chat_drop,seed)
         history.append((purpose,out_prompt))
         yield (history,None)
         model=loaded_model[int(model_drop)]
@@ -142,7 +145,7 @@ def run(purpose,history,image,model_drop,chat_drop,choice):
         yield ([(purpose,"an Error occured")],None)
     if choice == "Describe":
         #out_img = infer(out_prompt)
-        out_prompt=describe(purpose,history,image,model_drop,chat_drop)
+        out_prompt=describe(purpose,history,image,chat_drop,seed)
         history.append((purpose,out_prompt))
         yield (history,None)
 
@@ -173,6 +176,11 @@ with gr.Blocks(css=style) as iface:
     msg = gr.Textbox()
     model_drop=gr.Dropdown(label="Diffusion Models", type="index", choices=[m for m in models], value=models[0])
    chat_model_drop=gr.Dropdown(label="Chatbot Models", type="index", choices=[m for m in c_models], value=c_models[0])
+    chat_seed=gr.Slider(label="Seed (0 for random)", minimum=0, maximum=1000000000000,
+                        value=random.randint(1,1000000000000), step=1,
+                        interactive=True,
+                        info="Set Seed to 0 to randomize the session")
+
     with gr.Group():
         with gr.Row():
             submit_b = gr.Button()
@@ -184,7 +192,7 @@ with gr.Blocks(css=style) as iface:
 
     run_test = test_btn.click(gen_from_infer, [msg,chatbot,sumbox,model_drop,chat_model_drop,agent_choice],[chatbot,sumbox],concurrency_limit=20)
 
-    sub_b = submit_b.click(run, [msg,chatbot,sumbox,model_drop,chat_model_drop,agent_choice],[chatbot,sumbox],concurrency_limit=20)
-    sub_e = msg.submit(run, [msg, chatbot,sumbox,model_drop,chat_model_drop,agent_choice], [chatbot,sumbox],concurrency_limit=20)
+    sub_b = submit_b.click(run, [msg,chatbot,sumbox,model_drop,chat_model_drop,agent_choice,chat_seed],[chatbot,sumbox],concurrency_limit=20)
+    sub_e = msg.submit(run, [msg, chatbot,sumbox,model_drop,chat_model_drop,agent_choice,chat_seed], [chatbot,sumbox],concurrency_limit=20)
     stop_b.click(None,None,None, cancels=[sub_b,sub_e])
     iface.queue(default_concurrency_limit=None).launch()
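
For context, a minimal self-contained sketch of the convention this commit introduces: the seed travels with each request as an ordinary event input, and 0 means "draw a fresh random seed for this call". The model id, component names, and the chat function below are illustrative assumptions, not code from this repo.

import random

import gradio as gr
from huggingface_hub import InferenceClient

# Illustrative model id; the real app picks its chat model from a dropdown.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

def chat(prompt, seed):
    seed = int(seed)
    if seed == 0:
        # 0 is the sentinel for "randomize this call", as in run_gpt above
        seed = random.randint(1, 1000000000000)
    # InferenceClient.text_generation takes a seed kwarg, so the same
    # (prompt, seed) pair reproduces the same sampled output
    return client.text_generation(
        f"[INST] {prompt} [/INST]",
        temperature=1.0,
        max_new_tokens=256,
        do_sample=True,
        seed=seed,
    )

with gr.Blocks() as demo:
    msg = gr.Textbox(label="Prompt")
    seed_sl = gr.Slider(label="Seed (0 for random)", minimum=0,
                        maximum=1000000000000, step=1, value=0)
    out = gr.Textbox(label="Response")
    # The slider is just another input, mirroring how the commit appends
    # chat_seed to the input lists of submit_b.click and msg.submit.
    msg.submit(chat, [msg, seed_sl], out)

demo.queue().launch()

Passing the seed as an event input rather than storing it in a global keeps concurrent sessions independent, which matters here because the app runs its handlers with a high concurrency limit.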