JohnSmith9982 committed
Commit 051d37f
Parent: a949b5d

Upload 3 files

Files changed (3):
  1. app.py +75 -62
  2. presets.py +16 -1
  3. utils.py +12 -1
app.py CHANGED
@@ -43,7 +43,7 @@ else:
 
  gr.Chatbot.postprocess = postprocess
 
- with gr.Blocks(css=customCSS) as demo:
+ with gr.Blocks(css=customCSS,) as demo:
  history = gr.State([])
  token_count = gr.State([])
  promptTemplates = gr.State(load_template(get_template_names(plain=True)[0], mode=2))
@@ -51,71 +51,84 @@ with gr.Blocks(css=customCSS) as demo:
  FALSECONSTANT = gr.State(False)
  topic = gr.State("未命名对话历史记录")
 
+ # gr.HTML("""
+ # <div style="text-align: center; margin-top: 20px;">
+ # """)
  gr.HTML(title)
- with gr.Row():
- with gr.Column():
- keyTxt = gr.Textbox(show_label=True, placeholder=f"在这里输入你的OpenAI API-key...",value=my_api_key, type="password", visible=not HIDE_MY_KEY, label="API-Key")
- with gr.Column():
- with gr.Row():
- model_select_dropdown = gr.Dropdown(label="选择模型", choices=MODELS, multiselect=False, value=MODELS[0])
- use_streaming_checkbox = gr.Checkbox(label="实时传输回答", value=True, visible=enable_streaming_option)
- chatbot = gr.Chatbot() # .style(color_map=("#1D51EE", "#585A5B"))
- with gr.Row():
- with gr.Column(scale=12):
- user_input = gr.Textbox(show_label=False, placeholder="在这里输入").style(
- container=False)
- with gr.Column(min_width=50, scale=1):
- submitBtn = gr.Button("🚀", variant="primary")
- with gr.Row():
- emptyBtn = gr.Button("🧹 新的对话")
- retryBtn = gr.Button("🔄 重新生成")
- delLastBtn = gr.Button("🗑️ 删除最近一条对话")
- reduceTokenBtn = gr.Button("♻️ 总结对话")
- status_display = gr.Markdown("status: ready")
-
- systemPromptTxt = gr.Textbox(show_label=True, placeholder=f"在这里输入System Prompt...", label="System prompt", value=initial_prompt).style(container=True)
-
- with gr.Accordion(label="加载Prompt模板", open=False):
- with gr.Column():
- with gr.Row():
- with gr.Column(scale=6):
- templateFileSelectDropdown = gr.Dropdown(label="选择Prompt模板集合文件", choices=get_template_names(plain=True), multiselect=False, value=get_template_names(plain=True)[0])
- with gr.Column(scale=1):
- templateRefreshBtn = gr.Button("🔄 刷新")
- templaeFileReadBtn = gr.Button("📂 读入模板")
- with gr.Row():
- with gr.Column(scale=6):
- templateSelectDropdown = gr.Dropdown(label="从Prompt模板中加载", choices=load_template(get_template_names(plain=True)[0], mode=1), multiselect=False, value=load_template(get_template_names(plain=True)[0], mode=1)[0])
- with gr.Column(scale=1):
- templateApplyBtn = gr.Button("⬇️ 应用")
- with gr.Accordion(label="保存/加载对话历史记录", open=False):
- with gr.Column():
- with gr.Row():
- with gr.Column(scale=6):
- saveFileName = gr.Textbox(
- show_label=True, placeholder=f"在这里输入保存的文件名...", label="设置保存文件名", value="对话历史记录").style(container=True)
- with gr.Column(scale=1):
- saveHistoryBtn = gr.Button("💾 保存对话")
- with gr.Row():
- with gr.Column(scale=6):
- historyFileSelectDropdown = gr.Dropdown(label="从列表中加载对话", choices=get_history_names(plain=True), multiselect=False, value=get_history_names(plain=True)[0])
- with gr.Column(scale=1):
- historyRefreshBtn = gr.Button("🔄 刷新")
- historyReadBtn = gr.Button("📂 读入对话")
- #inputs, top_p, temperature, top_k, repetition_penalty
- with gr.Accordion("参数", open=False):
- top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.05,
- interactive=True, label="Top-p (nucleus sampling)",)
- temperature = gr.Slider(minimum=-0, maximum=5.0, value=1.0,
- step=0.1, interactive=True, label="Temperature",)
 
+ with gr.Row(scale=1).style(equal_height=True):
+
+ with gr.Column(scale=5):
+ with gr.Row(scale=1):
+ chatbot = gr.Chatbot().style(height=600) # .style(color_map=("#1D51EE", "#585A5B"))
+ with gr.Row(scale=1):
+ with gr.Column(scale=12):
+ user_input = gr.Textbox(show_label=False, placeholder="在这里输入").style(
+ container=False)
+ with gr.Column(min_width=50, scale=1):
+ submitBtn = gr.Button("🚀", variant="primary")
+ with gr.Row(scale=1):
+ emptyBtn = gr.Button("🧹 新的对话",)
+ retryBtn = gr.Button("🔄 重新生成")
+ delLastBtn = gr.Button("🗑️ 删除最近一条对话")
+ reduceTokenBtn = gr.Button("♻️ 总结对话")
+
+
+
+ with gr.Column():
+ with gr.Row(min_width=50,scale=1):
+ status_display = gr.Markdown("status: ready")
+ with gr.Tab(label="ChatGPT"):
+ keyTxt = gr.Textbox(show_label=True, placeholder=f"OpenAI API-key...",value=my_api_key, type="password", visible=not HIDE_MY_KEY, label="API-Key")
+ model_select_dropdown = gr.Dropdown(label="选择模型", choices=MODELS, multiselect=False, value=MODELS[0])
+ with gr.Accordion("参数", open=False):
+ top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.05,
+ interactive=True, label="Top-p (nucleus sampling)",)
+ temperature = gr.Slider(minimum=-0, maximum=5.0, value=1.0,
+ step=0.1, interactive=True, label="Temperature",)
+ use_streaming_checkbox = gr.Checkbox(label="实时传输回答", value=True, visible=enable_streaming_option)
+ use_websearch_checkbox = gr.Checkbox(label="使用在线搜索", value=False)
+
+ with gr.Tab(label="Prompt"):
+ systemPromptTxt = gr.Textbox(show_label=True, placeholder=f"在这里输入System Prompt...", label="System prompt", value=initial_prompt).style(container=True)
+ with gr.Accordion(label="加载Prompt模板", open=True):
+ with gr.Column():
+ with gr.Row():
+ with gr.Column(scale=6):
+ templateFileSelectDropdown = gr.Dropdown(label="选择Prompt模板集合文件", choices=get_template_names(plain=True), multiselect=False, value=get_template_names(plain=True)[0])
+ with gr.Column(scale=1):
+ templateRefreshBtn = gr.Button("🔄 刷新")
+ with gr.Row():
+ with gr.Column():
+ templateSelectDropdown = gr.Dropdown(label="从Prompt模板中加载", choices=load_template(get_template_names(plain=True)[0], mode=1), multiselect=False, value=load_template(get_template_names(plain=True)[0], mode=1)[0])
+
+ with gr.Tab(label="保存/加载"):
+ with gr.Accordion(label="保存/加载对话历史记录", open=True):
+ with gr.Column():
+ with gr.Row():
+ with gr.Column(scale=6):
+ saveFileName = gr.Textbox(
+ show_label=True, placeholder=f"在这里输入保存的文件名...", label="设置保存文件名", value="对话历史记录").style(container=True)
+ with gr.Column(scale=1):
+ saveHistoryBtn = gr.Button("💾 保存对话")
+ with gr.Row():
+ with gr.Column(scale=6):
+ historyFileSelectDropdown = gr.Dropdown(label="从列表中加载对话", choices=get_history_names(plain=True), multiselect=False, value=get_history_names(plain=True)[0])
+ with gr.Column(scale=1):
+ historyRefreshBtn = gr.Button("🔄 刷新")
+
+
+
+ gr.HTML("""
+ <div style="text-align: center; margin-top: 20px; margin-bottom: 20px;">
+ """)
  gr.Markdown(description)
 
 
- user_input.submit(predict, [keyTxt, systemPromptTxt, history, user_input, chatbot, token_count, top_p, temperature, use_streaming_checkbox, model_select_dropdown], [chatbot, history, status_display, token_count], show_progress=True)
+ user_input.submit(predict, [keyTxt, systemPromptTxt, history, user_input, chatbot, token_count, top_p, temperature, use_streaming_checkbox, model_select_dropdown, use_websearch_checkbox], [chatbot, history, status_display, token_count], show_progress=True)
  user_input.submit(reset_textbox, [], [user_input])
 
- submitBtn.click(predict, [keyTxt, systemPromptTxt, history, user_input, chatbot, token_count, top_p, temperature, use_streaming_checkbox, model_select_dropdown], [chatbot, history, status_display, token_count], show_progress=True)
+ submitBtn.click(predict, [keyTxt, systemPromptTxt, history, user_input, chatbot, token_count, top_p, temperature, use_streaming_checkbox, model_select_dropdown, use_websearch_checkbox], [chatbot, history, status_display, token_count], show_progress=True)
  submitBtn.click(reset_textbox, [], [user_input])
 
  emptyBtn.click(reset_state, outputs=[chatbot, history, token_count, status_display], show_progress=True)
@@ -134,13 +147,13 @@ with gr.Blocks(css=customCSS) as demo:
 
  historyRefreshBtn.click(get_history_names, None, [historyFileSelectDropdown])
 
- historyReadBtn.click(load_chat_history, [historyFileSelectDropdown, systemPromptTxt, history, chatbot], [saveFileName, systemPromptTxt, history, chatbot], show_progress=True)
+ historyFileSelectDropdown.change(load_chat_history, [historyFileSelectDropdown, systemPromptTxt, history, chatbot], [saveFileName, systemPromptTxt, history, chatbot], show_progress=True)
 
  templateRefreshBtn.click(get_template_names, None, [templateFileSelectDropdown])
 
- templaeFileReadBtn.click(load_template, [templateFileSelectDropdown], [promptTemplates, templateSelectDropdown], show_progress=True)
+ templateFileSelectDropdown.change(load_template, [templateFileSelectDropdown], [promptTemplates, templateSelectDropdown], show_progress=True)
 
- templateApplyBtn.click(get_template_content, [promptTemplates, templateSelectDropdown, systemPromptTxt], [systemPromptTxt], show_progress=True)
+ templateSelectDropdown.change(get_template_content, [promptTemplates, templateSelectDropdown, systemPromptTxt], [systemPromptTxt], show_progress=True)
 
  logging.info(colorama.Back.GREEN + "\n川虎的温馨提示:访问 http://localhost:7860 查看界面" + colorama.Style.RESET_ALL)
  # 默认开启本地服务器,默认可以直接从IP访问,默认不创建公开分享链接
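Note: the app.py changes above drop the "📂 读入模板", "📂 读入对话" and "⬇️ 应用" buttons and instead trigger loading from the dropdowns' .change() events. Below is a minimal sketch of that pattern (not part of this commit), assuming Gradio 3.x and using hypothetical template data in place of the app's real load_template/get_template_names helpers.

import gradio as gr

# Hypothetical template data; the real app reads these from template files.
TEMPLATES = {
    "中文翻译": "你是一名专业的中文翻译。",
    "代码助手": "You are a senior Python developer.",
}

def apply_template(name):
    # Return the prompt text for the selected template name.
    return TEMPLATES[name]

with gr.Blocks() as demo:
    templateSelectDropdown = gr.Dropdown(label="从Prompt模板中加载", choices=list(TEMPLATES), value="中文翻译")
    systemPromptTxt = gr.Textbox(label="System prompt", value=TEMPLATES["中文翻译"])
    # Selecting an entry applies it immediately, so a separate "应用" button is no longer needed.
    templateSelectDropdown.change(apply_template, [templateSelectDropdown], [systemPromptTxt])

if __name__ == "__main__":
    demo.launch()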
presets.py CHANGED
@@ -1,5 +1,5 @@
  # -*- coding:utf-8 -*-
- title = """<h1 align="center">川虎ChatGPT 🚀</h1>"""
+ title = """<h1 align="left">川虎ChatGPT 🚀</h1>"""
  description = """<div align=center>
 
  由Bilibili [土川虎虎虎](https://space.bilibili.com/29125536) 和 [明昭MZhao](https://space.bilibili.com/24807452)开发
@@ -29,10 +29,25 @@ pre code {
  color: #FFF;
  box-shadow: inset 0px 8px 16px hsla(0, 0%, 0%, .2)
  }
+
+ *{
+ border-radius: 3px !important;
+ transition: all 0.6s;
+ }
+
+
  """
 
  summarize_prompt = "你是谁?我们刚才聊了什么?" # 总结对话时的 prompt
  MODELS = ["gpt-3.5-turbo", "gpt-3.5-turbo-0301", "gpt-4","gpt-4-0314", "gpt-4-32k", "gpt-4-32k-0314"] # 可选的模型
+ websearch_prompt = """Web search results:
+
+ {web_results}
+ Current date: {current_date}
+
+ Instructions: Using the provided web search results, write a comprehensive reply to the given query. Make sure to cite results using [[number](URL)] notation after the reference. If the provided search results refer to multiple subjects with the same name, write separate answers for each subject.
+ Query: {query}
+ Reply in 中文"""
 
  # 错误信息
  standard_error_msg = "☹️发生了错误:" # 错误信息的标准前缀
utils.py CHANGED
@@ -16,6 +16,8 @@ from presets import *
  import tiktoken
  from tqdm import tqdm
  import colorama
+ from duckduckgo_search import ddg
+ import datetime
 
  # logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] %(message)s")
 
@@ -224,8 +226,17 @@ def predict_all(openai_api_key, system_prompt, history, inputs, chatbot, all_tok
  return chatbot, history, status_text, all_token_counts
 
 
- def predict(openai_api_key, system_prompt, history, inputs, chatbot, all_token_counts, top_p, temperature, stream=False, selected_model = MODELS[0], should_check_token_count = True): # repetition_penalty, top_k
+ def predict(openai_api_key, system_prompt, history, inputs, chatbot, all_token_counts, top_p, temperature, stream=False, selected_model = MODELS[0], use_websearch_checkbox = False, should_check_token_count = True): # repetition_penalty, top_k
  logging.info("输入为:" +colorama.Fore.BLUE + f"{inputs}" + colorama.Style.RESET_ALL)
+ if use_websearch_checkbox:
+ results = ddg(inputs, max_results=3)
+ web_results = []
+ for idx, result in enumerate(results):
+ logging.info(f"搜索结果{idx + 1}:{result}")
+ web_results.append(f'[{idx+1}]"{result["body"]}"\nURL: {result["href"]}')
+ web_results = "\n\n".join(web_results)
+ today = datetime.datetime.today().strftime("%Y-%m-%d")
+ inputs = websearch_prompt.replace("{current_date}", today).replace("{query}", inputs).replace("{web_results}", web_results)
  if len(openai_api_key) != 51:
  status_text = standard_error_msg + no_apikey_msg
  logging.info(status_text)
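Note: for reference, a standalone sketch (not part of the commit) of the web-search preprocessing that predict() now performs when use_websearch_checkbox is set. It assumes the duckduckgo_search package's ddg() helper returns dicts with "body" and "href" keys, uses an abbreviated copy of the websearch_prompt template from presets.py, and a hypothetical example query.

import datetime
from duckduckgo_search import ddg

# Abbreviated copy of presets.websearch_prompt; the real template also contains citation instructions.
websearch_prompt = """Web search results:

{web_results}
Current date: {current_date}

Instructions: Using the provided web search results, write a comprehensive reply to the given query.
Query: {query}
Reply in 中文"""

query = "川虎ChatGPT 是什么?"  # hypothetical example query
results = ddg(query, max_results=3)  # each result is a dict with "title", "body" and "href"
web_results = "\n\n".join(f'[{i + 1}]"{r["body"]}"\nURL: {r["href"]}' for i, r in enumerate(results))
today = datetime.datetime.today().strftime("%Y-%m-%d")

# Placeholders are filled with str.replace, exactly as predict() does above.
prompt = (websearch_prompt
          .replace("{current_date}", today)
          .replace("{query}", query)
          .replace("{web_results}", web_results))
print(prompt)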