Fix variable names
2 files changed:
- crazy_functions/高级功能函数模板.py (+4, -1)
- main.py (+3, -3)
crazy_functions/高级功能函数模板.py
@@ -5,10 +5,13 @@ import datetime
 @CatchException
 def 高阶功能模板函数(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
     history = []    # 清空历史,以免输入溢出
+    chatbot.append(("这是什么功能?", "[Local Message] 请注意,您正在调用一个函数模板,该函数面向希望实现更多有趣功能的开发者,它可以作为创建新功能函数的模板。为了做到简单易读,该函数只有25行代码,不会实时反馈文字流或心跳,请耐心等待程序输出完成。另外您若希望分享新的功能模组,请不吝PR!"))
+    yield chatbot, history, '正常' # 由于请求gpt需要一段时间,我们先及时地做一次状态显示
+
     for i in range(5):
         currentMonth = (datetime.date.today() + datetime.timedelta(days=i)).month
         currentDay = (datetime.date.today() + datetime.timedelta(days=i)).day
-        i_say = f'历史中哪些事件发生在{currentMonth}月{currentDay}日?列举两条并发送相关图片。发送图片时,请使用Markdown,将Unsplash API中的PUT_YOUR_QUERY_HERE
+        i_say = f'历史中哪些事件发生在{currentMonth}月{currentDay}日?列举两条并发送相关图片。发送图片时,请使用Markdown,将Unsplash API中的PUT_YOUR_QUERY_HERE替换成描述改事件的三个最重要的单词。'
         chatbot.append((i_say, "[Local Message] waiting gpt response."))
         yield chatbot, history, '正常' # 由于请求gpt需要一段时间,我们先及时地做一次状态显示

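Design note: the plugin template is a Python generator, and each yield of (chatbot, history, '正常') pushes an intermediate UI state to Gradio before the slow GPT request returns. That is why this change appends an explanatory message to chatbot and yields once at the top of the function body: the user gets immediate feedback instead of a blank panel while the first request is in flight. Below is a minimal sketch of that yield-for-status pattern; fake_plugin and slow_call are hypothetical stand-ins, not the project's real call chain.

import time

def slow_call(prompt):
    # Stand-in for the slow GPT request the real plugin makes.
    time.sleep(1)
    return "answer to: " + prompt

def fake_plugin(txt, chatbot, history):
    history = []  # clear history so the context window does not overflow
    chatbot.append(("What does this do?", "[Local Message] demo of the template"))
    yield chatbot, history, 'OK'      # update the UI right away, before any slow call
    for prompt in ("question 1", "question 2"):
        chatbot.append((prompt, "[Local Message] waiting for response."))
        yield chatbot, history, 'OK'  # show the pending question immediately
        answer = slow_call(prompt)
        chatbot[-1] = (prompt, answer)
        history.extend([prompt, answer])
        yield chatbot, history, 'OK'  # show the finished answer

# Driving the generator the way a Gradio event handler would:
for chat, hist, status in fake_plugin("", [], []):
    print(status, chat[-1])

Gradio accepts generator handlers and writes each yielded tuple to the declared outputs in order, which is what keeps the chatbot panel and the status display refreshing between requests.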
main.py
@@ -74,7 +74,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False) as demo:
     top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01,interactive=True, label="Top-p (nucleus sampling)",)
     temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True, label="Temperature",)

-    predict_args = dict(fn=predict, inputs=[txt, top_p, temperature, chatbot, history,
+    predict_args = dict(fn=predict, inputs=[txt, top_p, temperature, chatbot, history, system_prompt], outputs=[chatbot, history, statusDisplay], show_progress=True)
     empty_txt_args = dict(fn=lambda: "", inputs=[], outputs=[txt])

     txt.submit(**predict_args)
@@ -84,11 +84,11 @@
     resetBtn.click(lambda: ([], [], "已重置"), None, [chatbot, history, statusDisplay])
     for k in functional:
         functional[k]["Button"].click(predict,
-            [txt, top_p, temperature, chatbot, history,
+            [txt, top_p, temperature, chatbot, history, system_prompt, gr.State(True), gr.State(k)], [chatbot, history, statusDisplay], show_progress=True)
     file_upload.upload(on_file_uploaded, [file_upload, chatbot, txt], [chatbot, txt])
     for k in crazy_functional:
         click_handle = crazy_functional[k]["Button"].click(crazy_functional[k]["Function"],
-            [txt, top_p, temperature, chatbot, history,
+            [txt, top_p, temperature, chatbot, history, system_prompt, gr.State(PORT)], [chatbot, history, statusDisplay]
         )
         try: click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])
         except: pass
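All three main.py hunks follow the same Gradio wiring rule: when a button's click handler runs, its positional parameters are filled, in order, from the components listed in inputs, so constant values such as the boolean gr.State(True), the functional key k, and PORT are wrapped in gr.State(...) to ride along with the real components, while show_progress=True turns on the loading indicator over the outputs while the handler runs. A rough, hypothetical sketch of that wiring; the handler and component names below are illustrative, not the project's code.

import gradio as gr

def handler(txt, label, port):
    # label and port arrive via gr.State and are matched positionally to the inputs list
    return "button '%s' (port %d) received: %s" % (label, port, txt)

with gr.Blocks() as demo:
    txt = gr.Textbox(label="input")
    out = gr.Textbox(label="output")
    for name in ("Summarize", "Translate"):
        btn = gr.Button(name)
        # gr.State freezes a per-button constant at wiring time; a plain closure over
        # name would late-bind and every button would see the loop's final value.
        btn.click(handler, [txt, gr.State(name), gr.State(8080)], [out])

if __name__ == "__main__":
    demo.launch()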