Omnibus committed
Commit d9d28c8
1 Parent(s): 469a43a

Update app.py

Files changed (1)
  1. app.py +6 -2
app.py CHANGED
@@ -103,6 +103,8 @@ def chat_inf_tree(system_prompt,prompt,history,client_choice,seed,temp,tokens,to
 
 def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
     if len(client_choice)>=hid_val:
+        if system_prompt:
+            system_prompt=f'{system_prompt}, '
         client=client_z[int(hid_val)-1]
         if not history:
             history = []
@@ -116,7 +118,7 @@ def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             seed=seed,
         )
         #formatted_prompt=prompt
-        formatted_prompt = format_prompt_choose(f"{system_prompt}, {prompt}", history, client_choice[0])
+        formatted_prompt = format_prompt_choose(f"{system_prompt}{prompt}", history, client_choice[0])
         stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
         output = ""
         for response in stream:
@@ -130,6 +132,8 @@ def chat_inf_a(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
 
 def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,rep_p,hid_val):
     if len(client_choice)>=hid_val:
+        if system_prompt:
+            system_prompt=f'{system_prompt}, '
         client=client_z[int(hid_val)-1]
         if not history:
             history = []
@@ -143,7 +147,7 @@ def chat_inf_b(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p
             seed=seed,
         )
         #formatted_prompt=prompt
-        formatted_prompt = format_prompt_choose(f"{system_prompt}, {prompt}", history, client_choice[1])
+        formatted_prompt = format_prompt_choose(f"{system_prompt}{prompt}", history, client_choice[1])
         stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
         output = ""
         for response in stream:
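
The functional change in both chat_inf_a and chat_inf_b is how the first argument to format_prompt_choose is built: the old code always inserted ", " between the system prompt and the user prompt, so an empty system prompt produced a message starting with ", ". The new guard prepends the system prompt (with its ", " separator) only when it is non-empty. A minimal sketch of that string construction in isolation, where build_message is a hypothetical helper for illustration and not part of app.py:

def build_message(system_prompt, prompt):
    # New behaviour from this commit: only prepend the system prompt when it is set.
    if system_prompt:
        system_prompt = f'{system_prompt}, '
    return f"{system_prompt}{prompt}"

# The old behaviour, f"{system_prompt}, {prompt}", yields ", hello" for an empty system prompt.
assert build_message("", "hello") == "hello"
assert build_message("You are a pirate", "hello") == "You are a pirate, hello"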