qingxu99 committed
Commit 24780ee
1 Parent(s): b87bfea
Files changed (2)
  1. main.py +11 -8
  2. toolbox.py +14 -2
main.py CHANGED
@@ -1,7 +1,7 @@
 import os; os.environ['no_proxy'] = '*' # avoid unexpected pollution from proxy networks
 import gradio as gr
 from predict import predict
-from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf
+from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper

 # You are advised to copy a config_private.py for your own secrets, such as API keys and proxy URLs, so they are not accidentally pushed to GitHub for others to see
 proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT = \
@@ -87,8 +87,12 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
 system_prompt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt", value=initial_prompt)
 top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01,interactive=True, label="Top-p (nucleus sampling)",)
 temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True, label="Temperature",)
-checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
+checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "输入区2"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
 gr.Markdown(description)
+with gr.Accordion("输入区", open=True, visible=False) as input_crazy_fn:
+    with gr.Row():
+        txt2 = gr.Textbox(show_label=False, placeholder="Input question here.", label="输入区2").style(container=False)
+
 # interaction between the show/hide checkboxes and the function areas
 def fn_area_visibility(a):
     ret = {}
@@ -97,17 +101,16 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
     return ret
 checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn] )
 # gather the widget handle combinations that appear over and over
-input_combo = [txt, top_p, temperature, chatbot, history, system_prompt]
+input_combo = [txt, txt2, top_p, temperature, chatbot, history, system_prompt]
 output_combo = [chatbot, history, status]
-predict_args = dict(fn=predict, inputs=input_combo, outputs=output_combo)
-empty_txt_args = dict(fn=lambda: "", inputs=[], outputs=[txt]) # used to clear the input box after submitting
+predict_args = dict(fn=ArgsGeneralWrapper(predict), inputs=input_combo, outputs=output_combo)
 # submit button, reset button
-cancel_handles.append(txt.submit(**predict_args)) #; txt.submit(**empty_txt_args) clear the input box after submitting
-cancel_handles.append(submitBtn.click(**predict_args)) #; submitBtn.click(**empty_txt_args) clear the input box after submitting
+cancel_handles.append(txt.submit(**predict_args))
+cancel_handles.append(submitBtn.click(**predict_args))
 resetBtn.click(lambda: ([], [], "已重置"), None, output_combo)
 # callback registration for the basic function area
 for k in functional:
-    click_handle = functional[k]["Button"].click(predict, [*input_combo, gr.State(True), gr.State(k)], output_combo)
+    click_handle = functional[k]["Button"].click(fn=ArgsGeneralWrapper(predict), inputs=[*input_combo, gr.State(True), gr.State(k)], outputs=output_combo)
     cancel_handles.append(click_handle)
 # file upload area: interaction with the chatbot after a file is received
 file_upload.upload(on_file_uploaded, [file_upload, chatbot, txt], [chatbot, txt])
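The interesting UI change above is the new secondary input box: "输入区2" is added to the CheckboxGroup, and a hidden Accordion holding txt2 is created right after gr.Markdown(description). The hunk only shows the skeleton of fn_area_visibility (ret = {} ... return ret), so the sketch below is illustrative rather than the commit's exact body; it shows the Gradio pattern this code relies on, namely an event handler that returns a dict mapping each area component to gr.update(visible=...). The area_basic_fn row and its button are stand-ins for components defined elsewhere in main.py.

import gradio as gr

with gr.Blocks() as demo:
    checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "输入区2"],
                                  value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
    with gr.Row(visible=True) as area_basic_fn:   # stand-in for the basic-function button row
        gr.Button("示例按钮")
    with gr.Accordion("输入区", open=True, visible=False) as input_crazy_fn:
        txt2 = gr.Textbox(show_label=False, placeholder="Input question here.", label="输入区2")

    def fn_area_visibility(a):
        # 'a' is the list of labels currently ticked; returning {component: gr.update(...)}
        # toggles the visibility of each registered output component.
        return {area_basic_fn: gr.update(visible=("基础功能区" in a)),
                input_crazy_fn: gr.update(visible=("输入区2" in a))}

    checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, input_crazy_fn])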
 
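The rest of the main.py hunk is mechanical: every callback that previously called predict directly now goes through ArgsGeneralWrapper(predict), and input_combo gains txt2 so both textboxes reach the wrapper; the empty_txt_args helper, whose call sites were already commented out, is dropped. The handles appended to cancel_handles are the event objects returned by .submit() and .click(); in Gradio they can be fed to another event's cancels= argument so a stop button can interrupt a streaming generator. The toy app below sketches that pattern under stated assumptions: slow_echo stands in for the wrapped predict, and stopBtn is hypothetical, since this diff only shows the handles being collected.

import time
import gradio as gr

def slow_echo(message, chatbot):
    # stand-in for ArgsGeneralWrapper(predict): a generator that streams chatbot updates
    chatbot = (chatbot or []) + [[message, ""]]
    for ch in "echo: " + message:
        chatbot[-1][1] += ch
        time.sleep(0.05)
        yield chatbot

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    txt = gr.Textbox()
    submitBtn = gr.Button("Submit")
    stopBtn = gr.Button("Stop")        # hypothetical; not part of this commit
    cancel_handles = []
    run_args = dict(fn=slow_echo, inputs=[txt, chatbot], outputs=[chatbot])
    cancel_handles.append(txt.submit(**run_args))
    cancel_handles.append(submitBtn.click(**run_args))
    # cancels= lets the Stop button interrupt any of the collected streaming events
    stopBtn.click(fn=None, inputs=None, outputs=None, cancels=cancel_handles)

# demo.queue().launch()   # event cancellation requires the queue to be enabled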
toolbox.py CHANGED
@@ -2,6 +2,18 @@ import markdown, mdtex2html, threading, importlib, traceback, importlib, inspect
 from show_math import convert as convert_math
 from functools import wraps, lru_cache

+def ArgsGeneralWrapper(f):
+    """
+    A decorator that re-packs the input arguments, changing their order and structure.
+    """
+    def decorated(txt, txt2, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs):
+        txt_passon = txt
+        if txt == "" and txt2 != "": txt_passon = txt2
+        yield from f(txt_passon, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs)
+
+    return decorated
+
+
 def get_reduce_token_percent(text):
     try:
         # text = "maximum context length is 4097 tokens. However, your messages resulted in 4870 tokens"
@@ -116,7 +128,7 @@ def CatchException(f):
         from toolbox import get_conf
         proxies, = get_conf('proxies')
         tb_str = '```\n' + traceback.format_exc() + '```'
-        if len(chatbot) == 0: chatbot.append(["插件调度异常","异常原因"])
+        if chatbot is None or len(chatbot) == 0: chatbot = [["插件调度异常","异常原因"]]
         chatbot[-1] = (chatbot[-1][0], f"[Local Message] 实验性函数调用出错: \n\n{tb_str} \n\n当前代理可用性: \n\n{check_proxy(proxies)}")
         yield chatbot, history, f'异常 {e}'
     return decorated
@@ -129,7 +141,7 @@ def HotReload(f):
     def decorated(*args, **kwargs):
         fn_name = f.__name__
         f_hot_reload = getattr(importlib.reload(inspect.getmodule(f)), fn_name)
-        yield from f_hot_reload(*args, **kwargs)
+        yield from ArgsGeneralWrapper(f_hot_reload)(*args, **kwargs)
     return decorated

 def report_execption(chatbot, history, a, b):
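A quick way to see what the new wrapper does: decorated accepts both textboxes plus the usual arguments, forwards txt unless it is empty while txt2 is not, and then delegates to the wrapped generator using the original (txt, top_p, temperature, chatbot, history, system_prompt) signature that predict and the function plugins expect. The snippet below is a hedged usage check meant to run inside this repo (it imports the function added above); toy_predict is a hypothetical stand-in for predict.

from toolbox import ArgsGeneralWrapper   # added in this commit

def toy_predict(txt, top_p, temperature, chatbot, history, system_prompt):
    # hypothetical stand-in for predict(): just streams the prompt back
    yield f"received: {txt}"

wrapped = ArgsGeneralWrapper(toy_predict)

# primary input box filled -> it is forwarded unchanged
print(next(wrapped("hello", "", 1.0, 1.0, [], [], "")))                        # received: hello
# primary box empty, secondary box filled -> txt2 takes its place
print(next(wrapped("", "question from the second box", 1.0, 1.0, [], [], "")))  # received: question from the second box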
 
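The two remaining toolbox.py tweaks are defensive. In CatchException, the error path writes into chatbot[-1], which fails when a plugin crashes before the chatbot has any rows; the old guard only covered an empty list (and mutated it with append), while the new one also covers chatbot being None by rebinding the local name to a fresh placeholder row. In HotReload, the reloaded plugin function is now routed through ArgsGeneralWrapper as well, so plugins receive the same re-packed arguments as predict. Below is a minimal reproduction of why the CatchException guard matters, using a hypothetical render_error helper and the placeholder text from the diff.

def render_error(chatbot, tb_str):
    # mirrors the guarded error path inside CatchException's decorated()
    if chatbot is None or len(chatbot) == 0: chatbot = [["插件调度异常", "异常原因"]]
    chatbot[-1] = (chatbot[-1][0], f"[Local Message] 实验性函数调用出错: \n\n{tb_str}")
    return chatbot

print(render_error(None, "traceback..."))                # placeholder row is created first, no crash
print(render_error([], "traceback..."))                  # same for the empty-list case
print(render_error([["hi", "there"]], "traceback..."))   # an existing last row is overwritten in place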