MZhaovo committed on
Commit
6487726
1 Parent(s): d6526d1

feat. 自动命名功能完善啦

Browse files
ChuanhuChatbot.py CHANGED
@@ -371,6 +371,13 @@ with gr.Blocks(theme=small_and_beautiful_theme) as demo:
371
  use_streaming_checkbox = gr.Checkbox(
372
  label=i18n("实时传输回答"), value=True, visible=ENABLE_STREAMING_OPTION, elem_classes="switch-checkbox"
373
  )
 
 
 
 
 
 
 
374
  single_turn_checkbox = gr.Checkbox(label=i18n(
375
  "单轮对话"), value=False, elem_classes="switch-checkbox", elem_id="gr-single-session-cb", visible=False)
376
  # checkUpdateBtn = gr.Button(i18n("🔄 检查更新..."), visible=check_update)
@@ -544,20 +551,27 @@ with gr.Blocks(theme=small_and_beautiful_theme) as demo:
544
  fn=get_history_list, inputs=[user_name], outputs=[historySelectList]
545
  )
546
 
 
 
 
 
 
 
 
547
  # Chatbot
548
  cancelBtn.click(interrupt, [current_model], [])
549
 
550
  user_input.submit(**transfer_input_args).then(**
551
- chatgpt_predict_args).then(**end_outputing_args)
552
  user_input.submit(**get_usage_args)
553
 
554
- user_input.submit(auto_name_chat_history, [current_model, user_question, chatbot, user_name], [historySelectList], show_progress=False)
555
 
556
  submitBtn.click(**transfer_input_args).then(**chatgpt_predict_args,
557
- api_name="predict").then(**end_outputing_args)
558
  submitBtn.click(**get_usage_args)
559
 
560
- submitBtn.click(auto_name_chat_history, [current_model, user_question, chatbot, user_name], [historySelectList], show_progress=False)
561
 
562
  index_files.change(handle_file_upload, [current_model, index_files, chatbot, language_select_dropdown], [
563
  index_files, chatbot, status_display])
 
371
  use_streaming_checkbox = gr.Checkbox(
372
  label=i18n("实时传输回答"), value=True, visible=ENABLE_STREAMING_OPTION, elem_classes="switch-checkbox"
373
  )
374
+ name_chat_method = gr.Dropdown(
375
+ label=i18n("对话命名方式"),
376
+ choices=HISTORY_NAME_METHODS,
377
+ multiselect=False,
378
+ interactive=True,
379
+ value=HISTORY_NAME_METHODS[0],
380
+ )
381
  single_turn_checkbox = gr.Checkbox(label=i18n(
382
  "单轮对话"), value=False, elem_classes="switch-checkbox", elem_id="gr-single-session-cb", visible=False)
383
  # checkUpdateBtn = gr.Button(i18n("🔄 检查更新..."), visible=check_update)
 
551
  fn=get_history_list, inputs=[user_name], outputs=[historySelectList]
552
  )
553
 
554
+ auto_name_chat_history_args = dict(
555
+ fn=auto_name_chat_history,
556
+ inputs=[current_model, name_chat_method, user_question, chatbot, user_name, language_select_dropdown],
557
+ outputs=[historySelectList],
558
+ show_progress=False,
559
+ )
560
+
561
  # Chatbot
562
  cancelBtn.click(interrupt, [current_model], [])
563
 
564
  user_input.submit(**transfer_input_args).then(**
565
+ chatgpt_predict_args).then(**end_outputing_args).then(**auto_name_chat_history_args)
566
  user_input.submit(**get_usage_args)
567
 
568
+ # user_input.submit(auto_name_chat_history, [current_model, user_question, chatbot, user_name], [historySelectList], show_progress=False)
569
 
570
  submitBtn.click(**transfer_input_args).then(**chatgpt_predict_args,
571
+ api_name="predict").then(**end_outputing_args).then(**auto_name_chat_history_args)
572
  submitBtn.click(**get_usage_args)
573
 
574
+ # submitBtn.click(auto_name_chat_history, [current_model, user_question, chatbot, user_name], [historySelectList], show_progress=False)
575
 
576
  index_files.change(handle_file_upload, [current_model, index_files, chatbot, language_select_dropdown], [
577
  index_files, chatbot, status_display])
modules/models/base_model.py CHANGED
@@ -679,8 +679,9 @@ class BaseLLMModel:
679
  save_file(filename, self.system_prompt, self.history, chatbot, user_name)
680
  return init_history_list(user_name)
681
 
682
- def auto_name_chat_history(self, user_question, chatbot, user_name):
683
- if chatbot == []:
 
684
  filename = user_question[:16] + ".json"
685
  return self.rename_chat_history(filename, chatbot, user_name)
686
  else:
 
679
  save_file(filename, self.system_prompt, self.history, chatbot, user_name)
680
  return init_history_list(user_name)
681
 
682
+ def auto_name_chat_history(self, name_chat_method, user_question, chatbot, user_name, language):
683
+ if len(chatbot) == 1:
684
+ user_question = chatbot[0][0][26:-6]
685
  filename = user_question[:16] + ".json"
686
  return self.rename_chat_history(filename, chatbot, user_name)
687
  else:
modules/models/models.py CHANGED
@@ -228,9 +228,63 @@ class OpenAIClient(BaseLLMModel):
228
  ret = super().set_key(new_access_key)
229
  self._refresh_header()
230
  return ret
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
231
 
232
- # def auto_name_chat_history(self, user_question, chatbot, user_name):
233
- # return super().auto_name_chat_history(user_question, chatbot, user_name)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
234
 
235
 
236
  class ChatGLM_Client(BaseLLMModel):
 
228
  ret = super().set_key(new_access_key)
229
  self._refresh_header()
230
  return ret
231
+
232
+ def _single_query_at_once(self, history, temperature=1.0):
233
+ timeout = TIMEOUT_ALL
234
+ headers = {
235
+ "Content-Type": "application/json",
236
+ "Authorization": f"Bearer {self.api_key}",
237
+ "temperature": f"{temperature}",
238
+ }
239
+ payload = {
240
+ "model": self.model_name,
241
+ "messages": history,
242
+ }
243
+ # 如果有自定义的api-host,使用自定义host发送请求,否则使用默认设置发送请求
244
+ if shared.state.completion_url != COMPLETION_URL:
245
+ logging.debug(f"使用自定义API URL: {shared.state.completion_url}")
246
 
247
+ with retrieve_proxy():
248
+ response = requests.post(
249
+ shared.state.completion_url,
250
+ headers=headers,
251
+ json=payload,
252
+ stream=False,
253
+ timeout=timeout,
254
+ )
255
+
256
+ return response
257
+
258
+
259
+ def auto_name_chat_history(self, name_chat_method, user_question, chatbot, user_name, language):
260
+ if len(chatbot) == 1:
261
+ # 用户问题示例”<div class="user-message">你好呀</div>“
262
+ user_question = chatbot[0][0][26:-6]
263
+ if name_chat_method == i18n("模型自动总结(消耗tokens)"):
264
+ # ai回答示例”<div class="raw-message hideM"><pre>你好!有什么我可以帮助你的吗?</pre></div><div class="md-message">\n\n你好!有什么我可以帮助你的吗?\n</div>“
265
+ pattern = r'<div class="raw-message hideM"><pre>(.*?)</pre></div><div class="md-message">'
266
+ match = re.search(pattern, chatbot[0][1])
267
+ ai_answer = match.group(1)
268
+ try:
269
+ history = [
270
+ { "role": "system", "content": f"Please summarize the following conversation for a chat topic.\nNo more than 16 characters.\nNo special characters.\nReply in {language}."},
271
+ { "role": "user", "content": f"User: {user_question}\nAssistant: {ai_answer}"}
272
+ ]
273
+ response = self._single_query_at_once(history, temperature=0.0)
274
+ response = json.loads(response.text)
275
+ content = response["choices"][0]["message"]["content"]
276
+ filename = content + ".json"
277
+ except Exception as e:
278
+ logging.info(f"自动命名失败。{e}")
279
+ filename = user_question[:16] + ".json"
280
+ return self.rename_chat_history(filename, chatbot, user_name)
281
+ elif name_chat_method == i18n("第一次提问"):
282
+ filename = user_question[:16] + ".json"
283
+ return self.rename_chat_history(filename, chatbot, user_name)
284
+ else:
285
+ return gr.update()
286
+ else:
287
+ return gr.update()
288
 
289
 
290
  class ChatGLM_Client(BaseLLMModel):
modules/presets.py CHANGED
@@ -36,6 +36,7 @@ BILLING_NOT_APPLICABLE_MSG = i18n("账单信息不适用") # 本地运行的模
36
  TIMEOUT_STREAMING = 60 # 流式对话时的超时时间
37
  TIMEOUT_ALL = 200 # 非流式对话时的超时时间
38
  ENABLE_STREAMING_OPTION = True # 是否启用选择选择是否实时显示回答的勾选框
 
39
  HIDE_MY_KEY = False # 如果你想在UI中隐藏你的 API 密钥,将此值设置为 True
40
  CONCURRENT_COUNT = 100 # 允许同时使用的用户数量
41
 
@@ -132,6 +133,12 @@ REPLY_LANGUAGES = [
132
  "跟随问题语言(不稳定)"
133
  ]
134
 
 
 
 
 
 
 
135
 
136
  WEBSEARCH_PTOMPT_TEMPLATE = """\
137
  Web search results:
 
36
  TIMEOUT_STREAMING = 60 # 流式对话时的超时时间
37
  TIMEOUT_ALL = 200 # 非流式对话时的超时时间
38
  ENABLE_STREAMING_OPTION = True # 是否启用选择选择是否实时显示回答的勾选框
39
+ ENABLE_LLM_NAME_CHAT_OPTION = True # 是否启用选择是否使用LLM模型的勾选框
40
  HIDE_MY_KEY = False # 如果你想在UI中隐藏你的 API 密钥,将此值设置为 True
41
  CONCURRENT_COUNT = 100 # 允许同时使用的用户数量
42
 
 
133
  "跟随问题语言(不稳定)"
134
  ]
135
 
136
+ HISTORY_NAME_METHODS = [
137
+ i18n("根据时间"),
138
+ i18n("第一次提问"),
139
+ i18n("模型自动总结(消耗tokens)"),
140
+ ]
141
+
142
 
143
  WEBSEARCH_PTOMPT_TEMPLATE = """\
144
  Web search results: