qingxu99 committed
Commit f046837
Parent: 487ffe7

Bug pending investigation

Files changed (1): request_llm/bridge_newbing.py (+1, -0)
request_llm/bridge_newbing.py CHANGED
@@ -239,6 +239,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
         history_feedin.append([history[2*i], history[2*i+1]] )
 
     chatbot[-1] = (inputs, "[Local Message]: 等待NewBing响应中 ...")
+    response = "[Local Message]: 等待NewBing响应中 ..."
     yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
     for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=system_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, preprocess_newbing_out(response))
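The added line appears to guard against the case where `newbing_handle.stream_chat(...)` yields no chunks: `response` is otherwise first assigned inside the `for` loop, so an empty stream would leave it unbound for any code that reads it after the loop (not shown in this hunk). Below is a minimal standalone sketch of that failure mode, assuming such a later read exists; the empty generator and function names here are illustrative stand-ins, not the project's actual code.

# Sketch of why pre-initializing `response` matters (hypothetical names).

def stream_chat_empty():
    """Stand-in for a streaming backend that produces no chunks (e.g. on failure)."""
    return
    yield  # unreachable; makes this a generator that yields nothing

def predict_sketch():
    # Default assigned before the loop, mirroring the line added in this commit.
    response = "[Local Message]: waiting for response ..."
    for response in stream_chat_empty():
        pass  # normally: update the chatbot with each partial response
    # Without the default above, reading `response` here would raise
    # UnboundLocalError whenever the generator yields zero items.
    return response

print(predict_sketch())  # -> "[Local Message]: waiting for response ..."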