3v324v23 committed on
Commit b7dca67
1 Parent(s): c2dcab0

Handle network problems that occur in multi-threaded requests

Files changed (1):
  1. crazy_functions/crazy_utils.py  +11 -5
crazy_functions/crazy_utils.py CHANGED
@@ -1,4 +1,4 @@
-
+import traceback
 
 def request_gpt_model_in_new_thread_with_ui_alive(inputs, inputs_show_user, top_p, temperature, chatbot, history, sys_prompt, refresh_interval=0.2):
     import time
@@ -43,10 +43,16 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency(inp
     mutable = [["", time.time()] for _ in range(n_frag)]
 
     def _req_gpt(index, inputs, history, sys_prompt):
-        gpt_say = predict_no_ui_long_connection(
-            inputs=inputs, top_p=top_p, temperature=temperature, history=history, sys_prompt=sys_prompt, observe_window=mutable[
-                index]
-        )
+        try:
+            gpt_say = predict_no_ui_long_connection(
+                inputs=inputs, top_p=top_p, temperature=temperature, history=history, sys_prompt=sys_prompt, observe_window=mutable[index]
+            )
+        except:
+            # clean up after the failure
+            tb_str = '```\n' + traceback.format_exc() + '```'
+            gpt_say = f"[Local Message] 线程{index}在执行过程中遭遇问题, Traceback:\n\n{tb_str}\n\n"
+            if len(mutable[index][0]) > 0:
+                gpt_say += "此线程失败前收到的回答:" + mutable[index][0]
         return gpt_say
     # asynchronous tasks start here
     futures = [executor.submit(_req_gpt, index, inputs, history, sys_prompt) for index, inputs, history, sys_prompt in zip(
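In isolation, the change boils down to this: each worker thread wraps its long network call in try/except, formats the traceback into the string it returns, and appends whatever partial output the request had already streamed into that thread's observe_window slot, so one failed thread no longer brings down the whole batch. Below is a minimal, self-contained sketch of that pattern; `demo_error_tolerant_workers` and `fake_request` are illustrative stand-ins, not code from this repo.

```python
import traceback
from concurrent.futures import ThreadPoolExecutor


def demo_error_tolerant_workers(n_frag=3):
    # One shared slot per worker, mirroring the `mutable` observe_window idea:
    # the request layer streams partial text into its slot while the call runs.
    observe_windows = [[""] for _ in range(n_frag)]

    def fake_request(index):
        # Illustrative stand-in for a long network call: it writes partial
        # output first, then either returns or raises a network error.
        observe_windows[index][0] = f"partial answer from thread {index}"
        if index == 1:
            raise ConnectionError("simulated network failure")
        return f"full answer from thread {index}"

    def _req_gpt(index):
        try:
            return fake_request(index)
        except Exception:
            # Same recovery strategy as the commit: report the traceback,
            # then salvage whatever the thread had already received.
            tb_str = '```\n' + traceback.format_exc() + '```'
            msg = f"[Local Message] Thread {index} hit a problem, Traceback:\n\n{tb_str}\n\n"
            if len(observe_windows[index][0]) > 0:
                msg += "Answer received before the failure: " + observe_windows[index][0]
            return msg

    # Submit one task per fragment; a failure in one thread no longer kills the batch.
    with ThreadPoolExecutor(max_workers=n_frag) as executor:
        futures = [executor.submit(_req_gpt, i) for i in range(n_frag)]
        return [f.result() for f in futures]


if __name__ == "__main__":
    for result in demo_error_tolerant_workers():
        print(result.splitlines()[0])
```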