diff --git a/app.py b/app.py index e0b461b5ee34ef57fad93fc7014febfada2cd09f..b3b5abb3423a4ff5a50a580481ad0b4dfeb68d09 100644 --- a/app.py +++ b/app.py @@ -1,9 +1,9 @@ import os; os.environ['no_proxy'] = '*' # 避免代理网络产生意外污染 help_menu_description = \ -"""Github源代码开源和更新[地址🚀](https://github.com/binary-husky/gpt_academic), +"""Github源代码开源和更新[地址🚀](https://github.com/binary-husky/gpt_academic), 感谢热情的[开发者们❤️](https://github.com/binary-husky/gpt_academic/graphs/contributors). -
常见问题请查阅[项目Wiki](https://github.com/binary-husky/gpt_academic/wiki), +
常见问题请查阅[项目Wiki](https://github.com/binary-husky/gpt_academic/wiki), 如遇到Bug请前往[Bug反馈](https://github.com/binary-husky/gpt_academic/issues).
普通对话使用说明: 1. 输入问题; 2. 点击提交
基础功能区使用说明: 1. 输入文本; 2. 点击任意基础功能区按钮 @@ -15,27 +15,27 @@ help_menu_description = \ def main(): import subprocess, sys - subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'https://fastly.jsdelivr.net/gh/binary-husky/gradio-fix@gpt-academic/release/gradio-3.32.7-py3-none-any.whl']) + subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'https://public.agent-matrix.com/publish/gradio-3.32.8-py3-none-any.whl']) import gradio as gr - if gr.__version__ not in ['3.32.6', '3.32.7']: + if gr.__version__ not in ['3.32.8']: raise ModuleNotFoundError("使用项目内置Gradio获取最优体验! 请运行 `pip install -r requirements.txt` 指令安装内置Gradio及其他依赖, 详情信息见requirements.txt.") from request_llms.bridge_all import predict from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper, load_chat_cookies, DummyWith # 建议您复制一个config_private.py放自己的秘密, 如API和代理网址 proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION = get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION') CHATBOT_HEIGHT, LAYOUT, AVAIL_LLM_MODELS, AUTO_CLEAR_TXT = get_conf('CHATBOT_HEIGHT', 'LAYOUT', 'AVAIL_LLM_MODELS', 'AUTO_CLEAR_TXT') - ENABLE_AUDIO, AUTO_CLEAR_TXT, PATH_LOGGING, AVAIL_THEMES, THEME = get_conf('ENABLE_AUDIO', 'AUTO_CLEAR_TXT', 'PATH_LOGGING', 'AVAIL_THEMES', 'THEME') + ENABLE_AUDIO, AUTO_CLEAR_TXT, PATH_LOGGING, AVAIL_THEMES, THEME, ADD_WAIFU = get_conf('ENABLE_AUDIO', 'AUTO_CLEAR_TXT', 'PATH_LOGGING', 'AVAIL_THEMES', 'THEME', 'ADD_WAIFU') DARK_MODE, NUM_CUSTOM_BASIC_BTN, SSL_KEYFILE, SSL_CERTFILE = get_conf('DARK_MODE', 'NUM_CUSTOM_BASIC_BTN', 'SSL_KEYFILE', 'SSL_CERTFILE') INIT_SYS_PROMPT = get_conf('INIT_SYS_PROMPT') # 如果WEB_PORT是-1, 则随机选取WEB端口 PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT from check_proxy import get_current_version - from themes.theme import adjust_theme, advanced_css, theme_declaration - from themes.theme import js_code_for_css_changing, js_code_for_darkmode_init, js_code_for_toggle_darkmode, js_code_for_persistent_cookie_init + from themes.theme import adjust_theme, advanced_css, theme_declaration, js_code_clear, js_code_reset, js_code_show_or_hide, js_code_show_or_hide_group2 + from themes.theme import js_code_for_css_changing, js_code_for_toggle_darkmode, js_code_for_persistent_cookie_init from themes.theme import load_dynamic_theme, to_cookie_str, from_cookie_str, init_cookie title_html = f"
GPT 学术优化 {get_current_version()}
{theme_declaration}" - + # 问询记录, python 版本建议3.9+(越新越好) import logging, uuid os.makedirs(PATH_LOGGING, exist_ok=True) @@ -67,7 +67,7 @@ def main(): proxy_info = check_proxy(proxies) gr_L1 = lambda: gr.Row().style() - gr_L2 = lambda scale, elem_id: gr.Column(scale=scale, elem_id=elem_id) + gr_L2 = lambda scale, elem_id: gr.Column(scale=scale, elem_id=elem_id, min_width=400) if LAYOUT == "TOP-DOWN": gr_L1 = lambda: DummyWith() gr_L2 = lambda scale, elem_id: gr.Row() @@ -79,7 +79,7 @@ def main(): with gr.Blocks(title="GPT 学术优化", theme=set_theme, analytics_enabled=False, css=advanced_css) as demo: gr.HTML(title_html) gr.HTML('''
Duplicate Space请您打开此页面后务必点击上方的“复制空间”(Duplicate Space)按钮!使用时,先在输入框填入API-KEY然后回车。
切忌在“复制空间”(Duplicate Space)之前填入API_KEY或进行提问,否则您的API_KEY将极可能被空间所有者攫取!
支持任意数量的OpenAI的密钥和API2D的密钥共存,例如输入"OpenAI密钥1,API2D密钥2",然后提交,即可同时使用两种模型接口。
''') - secret_css, dark_mode, persistent_cookie = gr.Textbox(visible=False), gr.Textbox(DARK_MODE, visible=False), gr.Textbox(visible=False) + secret_css, dark_mode, py_pickle_cookie = gr.Textbox(visible=False), gr.Textbox(DARK_MODE, visible=False), gr.Textbox(visible=False) cookies = gr.State(load_chat_cookies()) with gr_L1(): with gr_L2(scale=2, elem_id="gpt-chat"): @@ -96,11 +96,12 @@ def main(): resetBtn = gr.Button("重置", elem_id="elem_reset", variant="secondary"); resetBtn.style(size="sm") stopBtn = gr.Button("停止", elem_id="elem_stop", variant="secondary"); stopBtn.style(size="sm") clearBtn = gr.Button("清除", elem_id="elem_clear", variant="secondary", visible=False); clearBtn.style(size="sm") - if ENABLE_AUDIO: + if ENABLE_AUDIO: with gr.Row(): audio_mic = gr.Audio(source="microphone", type="numpy", elem_id="elem_audio", streaming=True, show_label=False).style(container=False) with gr.Row(): status = gr.Markdown(f"Tip: 按Enter提交, 按Shift+Enter换行。当前模型: {LLM_MODEL} \n {proxy_info}", elem_id="state-panel") + with gr.Accordion("基础功能区", open=True, elem_id="basic-panel") as area_basic_fn: with gr.Row(): for k in range(NUM_CUSTOM_BASIC_BTN): @@ -117,7 +118,7 @@ def main(): with gr.Row(): gr.Markdown("插件可读取“输入区”文本/路径作为参数(上传文件自动修正路径)") with gr.Row(elem_id="input-plugin-group"): - plugin_group_sel = gr.Dropdown(choices=all_plugin_groups, label='', show_label=False, value=DEFAULT_FN_GROUPS, + plugin_group_sel = gr.Dropdown(choices=all_plugin_groups, label='', show_label=False, value=DEFAULT_FN_GROUPS, multiselect=True, interactive=True, elem_classes='normal_mut_select').style(container=False) with gr.Row(): for k, plugin in plugins.items(): @@ -125,7 +126,7 @@ def main(): visible = True if match_group(plugin['Group'], DEFAULT_FN_GROUPS) else False variant = plugins[k]["Color"] if "Color" in plugin else "secondary" info = plugins[k].get("Info", k) - plugin['Button'] = plugins[k]['Button'] = gr.Button(k, variant=variant, + plugin['Button'] = plugins[k]['Button'] = gr.Button(k, variant=variant, visible=visible, info_str=f'函数插件区: {info}').style(size="sm") with gr.Row(): with gr.Accordion("更多函数插件", open=True): @@ -137,7 +138,7 @@ def main(): with gr.Row(): dropdown = gr.Dropdown(dropdown_fn_list, value=r"打开插件列表", label="", show_label=False).style(container=False) with gr.Row(): - plugin_advanced_arg = gr.Textbox(show_label=True, label="高级参数输入区", visible=False, + plugin_advanced_arg = gr.Textbox(show_label=True, label="高级参数输入区", visible=False, placeholder="这里是特殊函数插件的高级参数输入区").style(container=False) with gr.Row(): switchy_bt = gr.Button(r"请先从插件列表中选择", variant="secondary").style(size="sm") @@ -145,13 +146,12 @@ def main(): with gr.Accordion("点击展开“文件下载区”。", open=False) as area_file_up: file_upload = gr.Files(label="任何文件, 推荐上传压缩文件(zip, tar)", file_count="multiple", elem_id="elem_upload") - with gr.Floating(init_x="0%", init_y="0%", visible=True, width=None, drag="forbidden", elem_id="tooltip"): with gr.Row(): with gr.Tab("上传文件", elem_id="interact-panel"): gr.Markdown("请上传本地文件/压缩包供“函数插件区”功能调用。请注意: 上传文件后会自动把输入区修改为相应路径。") file_upload_2 = gr.Files(label="任何文件, 推荐上传压缩文件(zip, tar)", file_count="multiple", elem_id="elem_upload_float") - + with gr.Tab("更换模型", elem_id="interact-panel"): md_dropdown = gr.Dropdown(AVAIL_LLM_MODELS, value=LLM_MODEL, label="更换LLM模型/请求源").style(container=False) top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01,interactive=True, label="Top-p (nucleus sampling)",) @@ -161,10 +161,11 @@ def main(): with gr.Tab("界面外观", elem_id="interact-panel"): theme_dropdown = 
gr.Dropdown(AVAIL_THEMES, value=THEME, label="更换UI主题").style(container=False) - checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "浮动输入区", "输入清除键", "插件参数区"], - value=["基础功能区", "函数插件区"], label="显示/隐藏功能区", elem_id='cbs').style(container=False) - checkboxes_2 = gr.CheckboxGroup(["自定义菜单"], - value=[], label="显示/隐藏自定义菜单", elem_id='cbsc').style(container=False) + checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "浮动输入区", "输入清除键", "插件参数区"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区", elem_id='cbs').style(container=False) + opt = ["自定义菜单"] + value=[] + if ADD_WAIFU: opt += ["添加Live2D形象"]; value += ["添加Live2D形象"] + checkboxes_2 = gr.CheckboxGroup(opt, value=value, label="显示/隐藏自定义菜单", elem_id='cbsc').style(container=False) dark_mode_btn = gr.Button("切换界面明暗 ☀", variant="secondary").style(size="sm") dark_mode_btn.click(None, None, None, _js=js_code_for_toggle_darkmode) with gr.Tab("帮助", elem_id="interact-panel"): @@ -181,7 +182,7 @@ def main(): submitBtn2 = gr.Button("提交", variant="primary"); submitBtn2.style(size="sm") resetBtn2 = gr.Button("重置", variant="secondary"); resetBtn2.style(size="sm") stopBtn2 = gr.Button("停止", variant="secondary"); stopBtn2.style(size="sm") - clearBtn2 = gr.Button("清除", variant="secondary", visible=False); clearBtn2.style(size="sm") + clearBtn2 = gr.Button("清除", elem_id="elem_clear2", variant="secondary", visible=False); clearBtn2.style(size="sm") with gr.Floating(init_x="20%", init_y="50%", visible=False, width="40%", drag="top") as area_customize: @@ -195,10 +196,12 @@ def main(): basic_fn_suffix = gr.Textbox(show_label=False, placeholder="输入新提示后缀", lines=4).style(container=False) with gr.Column(scale=1, min_width=70): basic_fn_confirm = gr.Button("确认并保存", variant="primary"); basic_fn_confirm.style(size="sm") - basic_fn_load = gr.Button("加载已保存", variant="primary"); basic_fn_load.style(size="sm") - def assign_btn(persistent_cookie_, cookies_, basic_btn_dropdown_, basic_fn_title, basic_fn_prefix, basic_fn_suffix): + basic_fn_clean = gr.Button("恢复默认", variant="primary"); basic_fn_clean.style(size="sm") + def assign_btn(persistent_cookie_, cookies_, basic_btn_dropdown_, basic_fn_title, basic_fn_prefix, basic_fn_suffix, clean_up=False): ret = {} + # 读取之前的自定义按钮 customize_fn_overwrite_ = cookies_['customize_fn_overwrite'] + # 更新新的自定义按钮 customize_fn_overwrite_.update({ basic_btn_dropdown_: { @@ -208,27 +211,41 @@ def main(): } } ) - cookies_.update(customize_fn_overwrite_) + if clean_up: + customize_fn_overwrite_ = {} + cookies_.update(customize_fn_overwrite_) # 更新cookie + visible = (not clean_up) and (basic_fn_title != "") if basic_btn_dropdown_ in customize_btns: - ret.update({customize_btns[basic_btn_dropdown_]: gr.update(visible=True, value=basic_fn_title)}) + # 是自定义按钮,不是预定义按钮 + ret.update({customize_btns[basic_btn_dropdown_]: gr.update(visible=visible, value=basic_fn_title)}) else: - ret.update({predefined_btns[basic_btn_dropdown_]: gr.update(visible=True, value=basic_fn_title)}) + # 是预定义按钮 + ret.update({predefined_btns[basic_btn_dropdown_]: gr.update(visible=visible, value=basic_fn_title)}) ret.update({cookies: cookies_}) try: persistent_cookie_ = from_cookie_str(persistent_cookie_) # persistent cookie to dict except: persistent_cookie_ = {} persistent_cookie_["custom_bnt"] = customize_fn_overwrite_ # dict update new value persistent_cookie_ = to_cookie_str(persistent_cookie_) # persistent cookie to dict - ret.update({persistent_cookie: persistent_cookie_}) # write persistent cookie + ret.update({py_pickle_cookie: persistent_cookie_}) # write persistent cookie return ret - - 
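Note on the assign_btn callback above: rather than returning a positional tuple, it returns a dict keyed by Gradio components, so one handler can update a variable set of custom/predefined buttons plus the cookie textbox. Below is a minimal, self-contained sketch of this Gradio 3 pattern; the component names are illustrative and not taken from this patch.

```python
import gradio as gr

with gr.Blocks() as demo:
    # Illustrative stand-ins for the patch's basic_fn_title / customize_btns wiring.
    title_box = gr.Textbox(label="新按钮名称")
    custom_btn = gr.Button("自定义按钮", visible=False)

    def rename_button(new_title):
        # Returning a dict keyed by components updates exactly the listed outputs;
        # gr.update(...) changes properties (visible, value) without recreating the widget.
        return {custom_btn: gr.update(visible=(new_title != ""), value=new_title or "自定义按钮")}

    title_box.submit(rename_button, inputs=[title_box], outputs=[custom_btn])

if __name__ == "__main__":
    demo.launch()
```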
def reflesh_btn(persistent_cookie_, cookies_): + + # update btn + h = basic_fn_confirm.click(assign_btn, [py_pickle_cookie, cookies, basic_btn_dropdown, basic_fn_title, basic_fn_prefix, basic_fn_suffix], + [py_pickle_cookie, cookies, *customize_btns.values(), *predefined_btns.values()]) + h.then(None, [py_pickle_cookie], None, _js="""(py_pickle_cookie)=>{setCookie("py_pickle_cookie", py_pickle_cookie, 365);}""") + # clean up btn + h2 = basic_fn_clean.click(assign_btn, [py_pickle_cookie, cookies, basic_btn_dropdown, basic_fn_title, basic_fn_prefix, basic_fn_suffix, gr.State(True)], + [py_pickle_cookie, cookies, *customize_btns.values(), *predefined_btns.values()]) + h2.then(None, [py_pickle_cookie], None, _js="""(py_pickle_cookie)=>{setCookie("py_pickle_cookie", py_pickle_cookie, 365);}""") + + def persistent_cookie_reload(persistent_cookie_, cookies_): ret = {} for k in customize_btns: ret.update({customize_btns[k]: gr.update(visible=False, value="")}) try: persistent_cookie_ = from_cookie_str(persistent_cookie_) # persistent cookie to dict except: return ret - + customize_fn_overwrite_ = persistent_cookie_.get("custom_bnt", {}) cookies_['customize_fn_overwrite'] = customize_fn_overwrite_ ret.update({cookies: cookies_}) @@ -238,26 +255,17 @@ def main(): if k in customize_btns: ret.update({customize_btns[k]: gr.update(visible=True, value=v['Title'])}) else: ret.update({predefined_btns[k]: gr.update(visible=True, value=v['Title'])}) return ret - - basic_fn_load.click(reflesh_btn, [persistent_cookie, cookies], [cookies, *customize_btns.values(), *predefined_btns.values()]) - h = basic_fn_confirm.click(assign_btn, [persistent_cookie, cookies, basic_btn_dropdown, basic_fn_title, basic_fn_prefix, basic_fn_suffix], - [persistent_cookie, cookies, *customize_btns.values(), *predefined_btns.values()]) - # save persistent cookie - h.then(None, [persistent_cookie], None, _js="""(persistent_cookie)=>{setCookie("persistent_cookie", persistent_cookie, 5);}""") # 功能区显示开关与功能区的互动 def fn_area_visibility(a): ret = {} - ret.update({area_basic_fn: gr.update(visible=("基础功能区" in a))}) - ret.update({area_crazy_fn: gr.update(visible=("函数插件区" in a))}) ret.update({area_input_primary: gr.update(visible=("浮动输入区" not in a))}) ret.update({area_input_secondary: gr.update(visible=("浮动输入区" in a))}) - ret.update({clearBtn: gr.update(visible=("输入清除键" in a))}) - ret.update({clearBtn2: gr.update(visible=("输入清除键" in a))}) ret.update({plugin_advanced_arg: gr.update(visible=("插件参数区" in a))}) if "浮动输入区" in a: ret.update({txt: gr.update(value="")}) return ret - checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn, area_input_primary, area_input_secondary, txt, txt2, clearBtn, clearBtn2, plugin_advanced_arg] ) + checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn, area_input_primary, area_input_secondary, txt, txt2, plugin_advanced_arg] ) + checkboxes.select(None, [checkboxes], None, _js=js_code_show_or_hide) # 功能区显示开关与功能区的互动 def fn_area_visibility_2(a): @@ -265,6 +273,7 @@ def main(): ret.update({area_customize: gr.update(visible=("自定义菜单" in a))}) return ret checkboxes_2.select(fn_area_visibility_2, [checkboxes_2], [area_customize] ) + checkboxes_2.select(None, [checkboxes_2], None, _js=js_code_show_or_hide_group2) # 整理反复出现的控件句柄组合 input_combo = [cookies, max_length_sl, md_dropdown, txt, txt2, top_p, temperature, chatbot, history, system_prompt, plugin_advanced_arg] @@ -275,15 +284,17 @@ def main(): cancel_handles.append(txt2.submit(**predict_args)) 
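Note on the persistence wiring above: to_cookie_str and from_cookie_str are imported from themes.theme and are not shown in this patch; after assign_btn runs, the serialized string is written to the browser via setCookie("py_pickle_cookie", ..., 365), and persistent_cookie_reload restores it on demo.load. The sketch below is only a rough mental model of such a serializer (the real helpers may differ); the pickle-plus-base64 choice is an assumption suggested by the py_pickle_cookie name.

```python
import base64
import pickle

def to_cookie_str(data: dict) -> str:
    # Serialize a small settings dict into a cookie-safe ASCII string.
    return base64.b64encode(pickle.dumps(data)).decode("ascii")

def from_cookie_str(cookie: str) -> dict:
    # Inverse of to_cookie_str; callers wrap this in try/except because the
    # browser cookie may be missing or corrupted.
    return pickle.loads(base64.b64decode(cookie.encode("ascii")))

if __name__ == "__main__":
    s = to_cookie_str({"custom_bnt": {"自定义按钮1": {"Title": "润色", "Prefix": "请润色:", "Suffix": ""}}})
    assert from_cookie_str(s)["custom_bnt"]["自定义按钮1"]["Title"] == "润色"
```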
cancel_handles.append(submitBtn.click(**predict_args)) cancel_handles.append(submitBtn2.click(**predict_args)) - resetBtn.click(lambda: ([], [], "已重置"), None, [chatbot, history, status]) - resetBtn2.click(lambda: ([], [], "已重置"), None, [chatbot, history, status]) - clearBtn.click(lambda: ("",""), None, [txt, txt2]) - clearBtn2.click(lambda: ("",""), None, [txt, txt2]) + resetBtn.click(None, None, [chatbot, history, status], _js=js_code_reset) # 先在前端快速清除chatbot&status + resetBtn2.click(None, None, [chatbot, history, status], _js=js_code_reset) # 先在前端快速清除chatbot&status + resetBtn.click(lambda: ([], [], "已重置"), None, [chatbot, history, status]) # 再在后端清除history + resetBtn2.click(lambda: ([], [], "已重置"), None, [chatbot, history, status]) # 再在后端清除history + clearBtn.click(None, None, [txt, txt2], _js=js_code_clear) + clearBtn2.click(None, None, [txt, txt2], _js=js_code_clear) if AUTO_CLEAR_TXT: - submitBtn.click(lambda: ("",""), None, [txt, txt2]) - submitBtn2.click(lambda: ("",""), None, [txt, txt2]) - txt.submit(lambda: ("",""), None, [txt, txt2]) - txt2.submit(lambda: ("",""), None, [txt, txt2]) + submitBtn.click(None, None, [txt, txt2], _js=js_code_clear) + submitBtn2.click(None, None, [txt, txt2], _js=js_code_clear) + txt.submit(None, None, [txt, txt2], _js=js_code_clear) + txt2.submit(None, None, [txt, txt2], _js=js_code_clear) # 基础功能区的回调函数注册 for k in functional: if ("Visible" in functional[k]) and (not functional[k]["Visible"]): continue @@ -324,7 +335,7 @@ def main(): else: css_part2 = adjust_theme()._get_theme_css() return css_part2 + css_part1 - + theme_handle = theme_dropdown.select(on_theme_dropdown_changed, [theme_dropdown, secret_css], [secret_css]) theme_handle.then( None, @@ -349,13 +360,13 @@ def main(): if not group_list: # 处理特殊情况:没有选择任何插件组 return [*[plugin['Button'].update(visible=False) for _, plugin in plugins_as_btn.items()], gr.Dropdown.update(choices=[])] for k, plugin in plugins.items(): - if plugin.get("AsButton", True): + if plugin.get("AsButton", True): btn_list.append(plugin['Button'].update(visible=match_group(plugin['Group'], group_list))) # 刷新按钮 if plugin.get('AdvancedArgs', False): dropdown_fn_list.append(k) # 对于需要高级参数的插件,亦在下拉菜单中显示 elif match_group(plugin['Group'], group_list): fns_list.append(k) # 刷新下拉列表 return [*btn_list, gr.Dropdown.update(choices=fns_list)] plugin_group_sel.select(fn=on_group_change, inputs=[plugin_group_sel], outputs=[*[plugin['Button'] for name, plugin in plugins_as_btn.items()], dropdown]) - if ENABLE_AUDIO: + if ENABLE_AUDIO: from crazy_functions.live_audio.audio_io import RealtimeAudioDistribution rad = RealtimeAudioDistribution() def deal_audio(audio, cookies): @@ -363,12 +374,12 @@ def main(): audio_mic.stream(deal_audio, inputs=[audio_mic, cookies]) - demo.load(init_cookie, inputs=[cookies, chatbot], outputs=[cookies]) - darkmode_js = js_code_for_darkmode_init - demo.load(None, inputs=None, outputs=[persistent_cookie], _js=js_code_for_persistent_cookie_init) - demo.load(None, inputs=[dark_mode], outputs=None, _js=darkmode_js) # 配置暗色主题或亮色主题 + demo.load(init_cookie, inputs=[cookies], outputs=[cookies]) + demo.load(persistent_cookie_reload, inputs = [py_pickle_cookie, cookies], + outputs = [py_pickle_cookie, cookies, *customize_btns.values(), *predefined_btns.values()], _js=js_code_for_persistent_cookie_init) + demo.load(None, inputs=[dark_mode], outputs=None, _js="""(dark_mode)=>{apply_cookie_for_checkbox(dark_mode);}""") # 配置暗色主题或亮色主题 demo.load(None, inputs=[gr.Textbox(LAYOUT, visible=False)], outputs=None, 
_js='(LAYOUT)=>{GptAcademicJavaScriptInit(LAYOUT);}') - + # gradio的inbrowser触发不太稳定,回滚代码到原始的浏览器打开函数 def run_delayed_tasks(): import threading, webbrowser, time @@ -379,7 +390,7 @@ def main(): def auto_updates(): time.sleep(0); auto_update() def open_browser(): time.sleep(2); webbrowser.open_new_tab(f"http://localhost:{PORT}") def warm_up_mods(): time.sleep(6); warm_up_modules() - + threading.Thread(target=auto_updates, name="self-upgrade", daemon=True).start() # 查看自动更新 threading.Thread(target=open_browser, name="open-browser", daemon=True).start() # 打开浏览器页面 threading.Thread(target=warm_up_mods, name="warm-up", daemon=True).start() # 预热tiktoken模块 @@ -390,10 +401,10 @@ def main(): # 如果需要在二级路径下运行 # CUSTOM_PATH = get_conf('CUSTOM_PATH') - # if CUSTOM_PATH != "/": + # if CUSTOM_PATH != "/": # from toolbox import run_gradio_in_subpath # run_gradio_in_subpath(demo, auth=AUTHENTICATION, port=PORT, custom_path=CUSTOM_PATH) - # else: + # else: # demo.launch(server_name="0.0.0.0", server_port=PORT, auth=AUTHENTICATION, favicon_path="docs/logo.png", # blocked_paths=["config.py","config_private.py","docker-compose.yml","Dockerfile",f"{PATH_LOGGING}/admin"]) diff --git a/config.py b/config.py index c9d3ec5d0bf66f9d6e9f5984f19f17a72f75a8ef..43d87c6b2c3a512a3a3b99ff59a5bfcb3fde0a53 100644 --- a/config.py +++ b/config.py @@ -2,8 +2,8 @@ 以下所有配置也都支持利用环境变量覆写,环境变量配置格式见docker-compose.yml。 读取优先级:环境变量 > config_private.py > config.py --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- --- - All the following configurations also support using environment variables to override, - and the environment variable configuration format can be seen in docker-compose.yml. + All the following configurations also support using environment variables to override, + and the environment variable configuration format can be seen in docker-compose.yml. Configuration reading priority: environment variable > config_private.py > config.py """ @@ -37,7 +37,7 @@ else: # ------------------------------------ 以下配置可以优化体验, 但大部分场合下并不需要修改 ------------------------------------ # 重新URL重新定向,实现更换API_URL的作用(高危设置! 常规情况下不要修改! 通过修改此设置,您将把您的API-KEY和对话隐私完全暴露给您设定的中间人!) -# 格式: API_URL_REDIRECT = {"https://api.openai.com/v1/chat/completions": "在这里填写重定向的api.openai.com的URL"} +# 格式: API_URL_REDIRECT = {"https://api.openai.com/v1/chat/completions": "在这里填写重定向的api.openai.com的URL"} # 举例: API_URL_REDIRECT = {"https://api.openai.com/v1/chat/completions": "https://reverse-proxy-url/v1/chat/completions"} API_URL_REDIRECT = {} @@ -93,14 +93,14 @@ DEFAULT_FN_GROUPS = ['对话', '编程', '学术', '智能体'] # 模型选择是 (注意: LLM_MODEL是默认选中的模型, 它*必须*被包含在AVAIL_LLM_MODELS列表中 ) -LLM_MODEL = "gpt-3.5-turbo" # 可选 ↓↓↓ -AVAIL_LLM_MODELS = ["gpt-3.5-turbo-1106","gpt-4-1106-preview","gpt-4-vision-preview", - "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5", - "gpt-4", "gpt-4-32k", "azure-gpt-4", "api2d-gpt-4", - "gemini-pro", "chatglm3", "claude-2", "zhipuai"] +LLM_MODEL = "gpt-3.5-turbo-16k" # 可选 ↓↓↓ +AVAIL_LLM_MODELS = ["gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-preview", + "gpt-3.5-turbo-1106", "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5", + "gpt-4", "gpt-4-32k", "azure-gpt-4", "glm-4", "glm-3-turbo", + "gemini-pro", "chatglm3", "claude-2"] # P.S. 
其他可用的模型还包括 [ # "moss", "qwen-turbo", "qwen-plus", "qwen-max" -# "zhipuai", "qianfan", "deepseekcoder", "llama2", "qwen-local", "gpt-3.5-turbo-0613", +# "zhipuai", "qianfan", "deepseekcoder", "llama2", "qwen-local", "gpt-3.5-turbo-0613", # "gpt-3.5-turbo-16k-0613", "gpt-3.5-random", "api2d-gpt-3.5-turbo", 'api2d-gpt-3.5-turbo-16k', # "spark", "sparkv2", "sparkv3", "chatglm_onnx", "claude-1-100k", "claude-2", "internlm", "jittorllms_pangualpha", "jittorllms_llama" # ] @@ -165,7 +165,7 @@ API_ORG = "" # 如果需要使用Slack Claude,使用教程详情见 request_llms/README.md -SLACK_CLAUDE_BOT_ID = '' +SLACK_CLAUDE_BOT_ID = '' SLACK_CLAUDE_USER_TOKEN = '' @@ -202,7 +202,7 @@ XFYUN_API_KEY = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" # 接入智谱大模型 ZHIPUAI_API_KEY = "" -ZHIPUAI_MODEL = "glm-4" # 可选 "glm-3-turbo" "glm-4" +ZHIPUAI_MODEL = "" # 此选项已废弃,不再需要填写 # # 火山引擎YUNQUE大模型 @@ -215,6 +215,11 @@ ZHIPUAI_MODEL = "glm-4" # 可选 "glm-3-turbo" "glm-4" ANTHROPIC_API_KEY = "" +# Mathpix 拥有执行PDF的OCR功能,但是需要注册账号 +MATHPIX_APPID = "" +MATHPIX_APPKEY = "" + + # 自定义API KEY格式 CUSTOM_API_KEY_PATTERN = "" @@ -231,8 +236,8 @@ HUGGINGFACE_ACCESS_TOKEN = "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" # 获取方法:复制以下空间https://huggingface.co/spaces/qingxu98/grobid,设为public,然后GROBID_URL = "https://(你的hf用户名如qingxu98)-(你的填写的空间名如grobid).hf.space" GROBID_URLS = [ "https://qingxu98-grobid.hf.space","https://qingxu98-grobid2.hf.space","https://qingxu98-grobid3.hf.space", - "https://qingxu98-grobid4.hf.space","https://qingxu98-grobid5.hf.space", "https://qingxu98-grobid6.hf.space", - "https://qingxu98-grobid7.hf.space", "https://qingxu98-grobid8.hf.space", + "https://qingxu98-grobid4.hf.space","https://qingxu98-grobid5.hf.space", "https://qingxu98-grobid6.hf.space", + "https://qingxu98-grobid7.hf.space", "https://qingxu98-grobid8.hf.space", ] @@ -253,7 +258,7 @@ PATH_LOGGING = "gpt_log" # 除了连接OpenAI之外,还有哪些场合允许使用代理,请勿修改 -WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid", +WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid", "Warmup_Modules", "Nougat_Download", "AutoGen"] @@ -304,9 +309,8 @@ NUM_CUSTOM_BASIC_BTN = 4 │ ├── BAIDU_CLOUD_API_KEY │ └── BAIDU_CLOUD_SECRET_KEY │ -├── "zhipuai" 智谱AI大模型chatglm_turbo -│ ├── ZHIPUAI_API_KEY -│ └── ZHIPUAI_MODEL +├── "glm-4", "glm-3-turbo", "zhipuai" 智谱AI大模型 +│ └── ZHIPUAI_API_KEY │ ├── "qwen-turbo" 等通义千问大模型 │ └── DASHSCOPE_API_KEY @@ -318,7 +322,7 @@ NUM_CUSTOM_BASIC_BTN = 4 ├── NEWBING_STYLE └── NEWBING_COOKIES - + 本地大模型示意图 │ ├── "chatglm3" @@ -358,6 +362,9 @@ NUM_CUSTOM_BASIC_BTN = 4 │ └── ALIYUN_SECRET │ └── PDF文档精准解析 - └── GROBID_URLS + ├── GROBID_URLS + ├── MATHPIX_APPID + └── MATHPIX_APPKEY + """ diff --git a/core_functional.py b/core_functional.py index 0b283a8970f5e35627f68889a381653f796ff26f..4074cddb27b4f10c86b803df37005f516bfd8f58 100644 --- a/core_functional.py +++ b/core_functional.py @@ -3,18 +3,27 @@ # 'stop' 颜色对应 theme.py 中的 color_er import importlib from toolbox import clear_line_break +from toolbox import apply_gpt_academic_string_mask_langbased +from toolbox import build_gpt_academic_masked_string_langbased from textwrap import dedent def get_core_functions(): return { - "英语学术润色": { - # [1*] 前缀,会被加在你的输入之前。例如,用来描述你的要求,例如翻译、解释代码、润色等等 - "Prefix": r"Below is a paragraph from an academic paper. Polish the writing to meet the academic style, " - r"improve the spelling, grammar, clarity, concision and overall readability. When necessary, rewrite the whole sentence. " - r"Firstly, you should provide the polished paragraph. 
" - r"Secondly, you should list all your modification and explain the reasons to do so in markdown table." + "\n\n", - # [2*] 后缀,会被加在你的输入之后。例如,配合前缀可以把你的输入内容用引号圈起来 + "学术语料润色": { + # [1*] 前缀字符串,会被加在你的输入之前。例如,用来描述你的要求,例如翻译、解释代码、润色等等。 + # 这里填一个提示词字符串就行了,这里为了区分中英文情景搞复杂了一点 + "Prefix": build_gpt_academic_masked_string_langbased( + text_show_english= + r"Below is a paragraph from an academic paper. Polish the writing to meet the academic style, " + r"improve the spelling, grammar, clarity, concision and overall readability. When necessary, rewrite the whole sentence. " + r"Firstly, you should provide the polished paragraph. " + r"Secondly, you should list all your modification and explain the reasons to do so in markdown table.", + text_show_chinese= + r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," + r"同时分解长句,减少重复,并提供改进建议。请先提供文本的更正版本,然后在markdown表格中列出修改的内容,并给出修改的理由:" + ) + "\n\n", + # [2*] 后缀字符串,会被加在你的输入之后。例如,配合前缀可以把你的输入内容用引号圈起来 "Suffix": r"", # [3] 按钮颜色 (可选参数,默认 secondary) "Color": r"secondary", @@ -32,8 +41,10 @@ def get_core_functions(): "Prefix": r"", # 后缀,会被加在你的输入之后。例如,配合前缀可以把你的输入内容用引号圈起来 "Suffix": + # dedent() 函数用于去除多行字符串的缩进 dedent("\n"+r''' ============================== + 使用mermaid flowchart对以上文本进行总结,概括上述段落的内容以及内在逻辑关系,例如: 以下是对以上文本的总结,以mermaid flowchart的形式展示: @@ -83,14 +94,22 @@ def get_core_functions(): "学术英中互译": { - "Prefix": r"I want you to act as a scientific English-Chinese translator, " + - r"I will provide you with some paragraphs in one language " + - r"and your task is to accurately and academically translate the paragraphs only into the other language. " + - r"Do not repeat the original provided paragraphs after translation. " + - r"You should use artificial intelligence tools, " + - r"such as natural language processing, and rhetorical knowledge " + - r"and experience about effective writing techniques to reply. " + - r"I'll give you my paragraphs as follows, tell me what language it is written in, and then translate:" + "\n\n", + "Prefix": build_gpt_academic_masked_string_langbased( + text_show_chinese= + r"I want you to act as a scientific English-Chinese translator, " + r"I will provide you with some paragraphs in one language " + r"and your task is to accurately and academically translate the paragraphs only into the other language. " + r"Do not repeat the original provided paragraphs after translation. " + r"You should use artificial intelligence tools, " + r"such as natural language processing, and rhetorical knowledge " + r"and experience about effective writing techniques to reply. 
" + r"I'll give you my paragraphs as follows, tell me what language it is written in, and then translate:", + text_show_english= + r"你是经验丰富的翻译,请把以下学术文章段落翻译成中文," + r"并同时充分考虑中文的语法、清晰、简洁和整体可读性," + r"必要时,你可以修改整个句子的顺序以确保翻译后的段落符合中文的语言习惯。" + r"你需要翻译的文本如下:" + ) + "\n\n", "Suffix": r"", }, @@ -140,7 +159,11 @@ def handle_core_functionality(additional_fn, inputs, history, chatbot): if "PreProcess" in core_functional[additional_fn]: if core_functional[additional_fn]["PreProcess"] is not None: inputs = core_functional[additional_fn]["PreProcess"](inputs) # 获取预处理函数(如果有的话) - inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"] + # 为字符串加上上面定义的前缀和后缀。 + inputs = apply_gpt_academic_string_mask_langbased( + string = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"], + lang_reference = inputs, + ) if core_functional[additional_fn].get("AutoClearHistory", False): history = [] return inputs, history diff --git a/crazy_functional.py b/crazy_functional.py index 45400731c519d5f82570fd2449c20961cc20d2d3..3e998e56fce91582ab89d2c7e7b41eb94eabdf8d 100644 --- a/crazy_functional.py +++ b/crazy_functional.py @@ -32,10 +32,9 @@ def get_crazy_functions(): from crazy_functions.理解PDF文档内容 import 理解PDF文档内容标准文件输入 from crazy_functions.Latex全文润色 import Latex中文润色 from crazy_functions.Latex全文润色 import Latex英文纠错 - from crazy_functions.Latex全文翻译 import Latex中译英 - from crazy_functions.Latex全文翻译 import Latex英译中 from crazy_functions.批量Markdown翻译 import Markdown中译英 from crazy_functions.虚空终端 import 虚空终端 + from crazy_functions.生成多种Mermaid图表 import 生成多种Mermaid图表 function_plugins = { "虚空终端": { @@ -71,6 +70,15 @@ def get_crazy_functions(): "Info": "清除所有缓存文件,谨慎操作 | 不需要输入参数", "Function": HotReload(清除缓存), }, + "生成多种Mermaid图表(从当前对话或路径(.pdf/.md/.docx)中生产图表)": { + "Group": "对话", + "Color": "stop", + "AsButton": False, + "Info" : "基于当前对话或文件生成多种Mermaid图表,图表类型由模型判断", + "Function": HotReload(生成多种Mermaid图表), + "AdvancedArgs": True, + "ArgsReminder": "请输入图类型对应的数字,不输入则为模型自行判断:1-流程图,2-序列图,3-类图,4-饼图,5-甘特图,6-状态图,7-实体关系图,8-象限提示图,9-思维导图", + }, "批量总结Word文档": { "Group": "学术", "Color": "stop", @@ -237,13 +245,7 @@ def get_crazy_functions(): "Info": "对英文Latex项目全文进行润色处理 | 输入参数为路径或上传压缩包", "Function": HotReload(Latex英文润色), }, - "英文Latex项目全文纠错(输入路径或上传压缩包)": { - "Group": "学术", - "Color": "stop", - "AsButton": False, # 加入下拉菜单中 - "Info": "对英文Latex项目全文进行纠错处理 | 输入参数为路径或上传压缩包", - "Function": HotReload(Latex英文纠错), - }, + "中文Latex项目全文润色(输入路径或上传压缩包)": { "Group": "学术", "Color": "stop", @@ -252,6 +254,14 @@ def get_crazy_functions(): "Function": HotReload(Latex中文润色), }, # 已经被新插件取代 + # "英文Latex项目全文纠错(输入路径或上传压缩包)": { + # "Group": "学术", + # "Color": "stop", + # "AsButton": False, # 加入下拉菜单中 + # "Info": "对英文Latex项目全文进行纠错处理 | 输入参数为路径或上传压缩包", + # "Function": HotReload(Latex英文纠错), + # }, + # 已经被新插件取代 # "Latex项目全文中译英(输入路径或上传压缩包)": { # "Group": "学术", # "Color": "stop", @@ -522,7 +532,9 @@ def get_crazy_functions(): print("Load function plugin failed") try: - from crazy_functions.Latex输出PDF结果 import Latex英文纠错加PDF对比 + from crazy_functions.Latex输出PDF import Latex英文纠错加PDF对比 + from crazy_functions.Latex输出PDF import Latex翻译中文并重新编译PDF + from crazy_functions.Latex输出PDF import PDF翻译中文并重新编译PDF function_plugins.update( { @@ -533,38 +545,39 @@ def get_crazy_functions(): "AdvancedArgs": True, "ArgsReminder": "如果有必要, 请在此处追加更细致的矫错指令(使用英文)。", "Function": HotReload(Latex英文纠错加PDF对比), - } - } - ) - from crazy_functions.Latex输出PDF结果 import Latex翻译中文并重新编译PDF - - function_plugins.update( - { + }, 
"Arxiv论文精细翻译(输入arxivID)[需Latex]": { "Group": "学术", "Color": "stop", "AsButton": False, "AdvancedArgs": True, - "ArgsReminder": "如果有必要, 请在此处给出自定义翻译命令, 解决部分词汇翻译不准确的问题。 " - + "例如当单词'agent'翻译不准确时, 请尝试把以下指令复制到高级参数区: " - + 'If the term "agent" is used in this section, it should be translated to "智能体". ', + "ArgsReminder": r"如果有必要, 请在此处给出自定义翻译命令, 解决部分词汇翻译不准确的问题。 " + r"例如当单词'agent'翻译不准确时, 请尝试把以下指令复制到高级参数区: " + r'If the term "agent" is used in this section, it should be translated to "智能体". ', "Info": "Arixv论文精细翻译 | 输入参数arxiv论文的ID,比如1812.10695", "Function": HotReload(Latex翻译中文并重新编译PDF), - } - } - ) - function_plugins.update( - { + }, "本地Latex论文精细翻译(上传Latex项目)[需Latex]": { "Group": "学术", "Color": "stop", "AsButton": False, "AdvancedArgs": True, - "ArgsReminder": "如果有必要, 请在此处给出自定义翻译命令, 解决部分词汇翻译不准确的问题。 " - + "例如当单词'agent'翻译不准确时, 请尝试把以下指令复制到高级参数区: " - + 'If the term "agent" is used in this section, it should be translated to "智能体". ', + "ArgsReminder": r"如果有必要, 请在此处给出自定义翻译命令, 解决部分词汇翻译不准确的问题。 " + r"例如当单词'agent'翻译不准确时, 请尝试把以下指令复制到高级参数区: " + r'If the term "agent" is used in this section, it should be translated to "智能体". ', "Info": "本地Latex论文精细翻译 | 输入参数是路径", "Function": HotReload(Latex翻译中文并重新编译PDF), + }, + "PDF翻译中文并重新编译PDF(上传PDF)[需Latex]": { + "Group": "学术", + "Color": "stop", + "AsButton": False, + "AdvancedArgs": True, + "ArgsReminder": r"如果有必要, 请在此处给出自定义翻译命令, 解决部分词汇翻译不准确的问题。 " + r"例如当单词'agent'翻译不准确时, 请尝试把以下指令复制到高级参数区: " + r'If the term "agent" is used in this section, it should be translated to "智能体". ', + "Info": "PDF翻译中文,并重新编译PDF | 输入参数为路径", + "Function": HotReload(PDF翻译中文并重新编译PDF) } } ) diff --git "a/crazy_functions/Latex\345\205\250\346\226\207\346\266\246\350\211\262.py" "b/crazy_functions/Latex\345\205\250\346\226\207\346\266\246\350\211\262.py" index b736fe896979cf3c8b08910c8bb21bfb4809c9a4..3bd0613d4dcf7fd8b535e6a857b14130f85b2df9 100644 --- "a/crazy_functions/Latex\345\205\250\346\226\207\346\266\246\350\211\262.py" +++ "b/crazy_functions/Latex\345\205\250\346\226\207\346\266\246\350\211\262.py" @@ -135,11 +135,11 @@ def 多文件润色(file_manifest, project_folder, llm_kwargs, plugin_kwargs, ch @CatchException -def Latex英文润色(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Latex英文润色(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", - "对整个Latex项目进行润色。函数插件贡献者: Binary-Husky。(注意,此插件不调用Latex,如果有Latex环境,请使用“Latex英文纠错+高亮”插件)"]) + "对整个Latex项目进行润色。函数插件贡献者: Binary-Husky。(注意,此插件不调用Latex,如果有Latex环境,请使用「Latex英文纠错+高亮修正位置(需Latex)插件」"]) yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 @@ -173,7 +173,7 @@ def Latex英文润色(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_p @CatchException -def Latex中文润色(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Latex中文润色(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", @@ -209,7 +209,7 @@ def Latex中文润色(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_p @CatchException -def Latex英文纠错(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Latex英文纠错(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", diff --git "a/crazy_functions/Latex\345\205\250\346\226\207\347\277\273\350\257\221.py" "b/crazy_functions/Latex\345\205\250\346\226\207\347\277\273\350\257\221.py" index 
49470c864e59b790b09789b97227e7b00768ccfd..d6c3b5edc30085397548128f9de0b55f22d593e2 100644 --- "a/crazy_functions/Latex\345\205\250\346\226\207\347\277\273\350\257\221.py" +++ "b/crazy_functions/Latex\345\205\250\346\226\207\347\277\273\350\257\221.py" @@ -106,7 +106,7 @@ def 多文件翻译(file_manifest, project_folder, llm_kwargs, plugin_kwargs, ch @CatchException -def Latex英译中(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Latex英译中(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", @@ -143,7 +143,7 @@ def Latex英译中(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prom @CatchException -def Latex中译英(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Latex中译英(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", diff --git "a/crazy_functions/Latex\350\276\223\345\207\272PDF.py" "b/crazy_functions/Latex\350\276\223\345\207\272PDF.py" new file mode 100644 index 0000000000000000000000000000000000000000..fc878f9ff078bd92e48033e981159aa17a02cf2a --- /dev/null +++ "b/crazy_functions/Latex\350\276\223\345\207\272PDF.py" @@ -0,0 +1,484 @@ +from toolbox import update_ui, trimmed_format_exc, get_conf, get_log_folder, promote_file_to_downloadzone +from toolbox import CatchException, report_exception, update_ui_lastest_msg, zip_result, gen_time_str +from functools import partial +import glob, os, requests, time, json, tarfile + +pj = os.path.join +ARXIV_CACHE_DIR = os.path.expanduser(f"~/arxiv_cache/") + + +# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- 工具函数 =-=-=-=-=-=-=-=-=-=-=-=-=-=-=- +# 专业词汇声明 = 'If the term "agent" is used in this section, it should be translated to "智能体". ' +def switch_prompt(pfg, mode, more_requirement): + """ + Generate prompts and system prompts based on the mode for proofreading or translating. + Args: + - pfg: Proofreader or Translator instance. + - mode: A string specifying the mode, either 'proofread' or 'translate_zh'. + + Returns: + - inputs_array: A list of strings containing prompts for users to respond to. + - sys_prompt_array: A list of strings containing prompts for system prompts. + """ + n_split = len(pfg.sp_file_contents) + if mode == 'proofread_en': + inputs_array = [r"Below is a section from an academic paper, proofread this section." + + r"Do not modify any latex command such as \section, \cite, \begin, \item and equations. " + more_requirement + + r"Answer me only with the revised text:" + + f"\n\n{frag}" for frag in pfg.sp_file_contents] + sys_prompt_array = ["You are a professional academic paper writer." for _ in range(n_split)] + elif mode == 'translate_zh': + inputs_array = [ + r"Below is a section from an English academic paper, translate it into Chinese. " + more_requirement + + r"Do not modify any latex command such as \section, \cite, \begin, \item and equations. " + + r"Answer me only with the translated text:" + + f"\n\n{frag}" for frag in pfg.sp_file_contents] + sys_prompt_array = ["You are a professional translator." for _ in range(n_split)] + else: + assert False, "未知指令" + return inputs_array, sys_prompt_array + + +def desend_to_extracted_folder_if_exist(project_folder): + """ + Descend into the extracted folder if it exists, otherwise return the original folder. + + Args: + - project_folder: A string specifying the folder path. + + Returns: + - A string specifying the path to the extracted folder, or the original folder if there is no extracted folder. 
+ """ + maybe_dir = [f for f in glob.glob(f'{project_folder}/*') if os.path.isdir(f)] + if len(maybe_dir) == 0: return project_folder + if maybe_dir[0].endswith('.extract'): return maybe_dir[0] + return project_folder + + +def move_project(project_folder, arxiv_id=None): + """ + Create a new work folder and copy the project folder to it. + + Args: + - project_folder: A string specifying the folder path of the project. + + Returns: + - A string specifying the path to the new work folder. + """ + import shutil, time + time.sleep(2) # avoid time string conflict + if arxiv_id is not None: + new_workfolder = pj(ARXIV_CACHE_DIR, arxiv_id, 'workfolder') + else: + new_workfolder = f'{get_log_folder()}/{gen_time_str()}' + try: + shutil.rmtree(new_workfolder) + except: + pass + + # align subfolder if there is a folder wrapper + items = glob.glob(pj(project_folder, '*')) + items = [item for item in items if os.path.basename(item) != '__MACOSX'] + if len(glob.glob(pj(project_folder, '*.tex'))) == 0 and len(items) == 1: + if os.path.isdir(items[0]): project_folder = items[0] + + shutil.copytree(src=project_folder, dst=new_workfolder) + return new_workfolder + + +def arxiv_download(chatbot, history, txt, allow_cache=True): + def check_cached_translation_pdf(arxiv_id): + translation_dir = pj(ARXIV_CACHE_DIR, arxiv_id, 'translation') + if not os.path.exists(translation_dir): + os.makedirs(translation_dir) + target_file = pj(translation_dir, 'translate_zh.pdf') + if os.path.exists(target_file): + promote_file_to_downloadzone(target_file, rename_file=None, chatbot=chatbot) + target_file_compare = pj(translation_dir, 'comparison.pdf') + if os.path.exists(target_file_compare): + promote_file_to_downloadzone(target_file_compare, rename_file=None, chatbot=chatbot) + return target_file + return False + + def is_float(s): + try: + float(s) + return True + except ValueError: + return False + + if ('.' in txt) and ('/' not in txt) and is_float(txt): # is arxiv ID + txt = 'https://arxiv.org/abs/' + txt.strip() + if ('.' 
in txt) and ('/' not in txt) and is_float(txt[:10]): # is arxiv ID + txt = 'https://arxiv.org/abs/' + txt[:10] + + if not txt.startswith('https://arxiv.org'): + return txt, None # 是本地文件,跳过下载 + + # <-------------- inspect format -------------> + chatbot.append([f"检测到arxiv文档连接", '尝试下载 ...']) + yield from update_ui(chatbot=chatbot, history=history) + time.sleep(1) # 刷新界面 + + url_ = txt # https://arxiv.org/abs/1707.06690 + if not txt.startswith('https://arxiv.org/abs/'): + msg = f"解析arxiv网址失败, 期望格式例如: https://arxiv.org/abs/1707.06690。实际得到格式: {url_}。" + yield from update_ui_lastest_msg(msg, chatbot=chatbot, history=history) # 刷新界面 + return msg, None + # <-------------- set format -------------> + arxiv_id = url_.split('/abs/')[-1] + if 'v' in arxiv_id: arxiv_id = arxiv_id[:10] + cached_translation_pdf = check_cached_translation_pdf(arxiv_id) + if cached_translation_pdf and allow_cache: return cached_translation_pdf, arxiv_id + + url_tar = url_.replace('/abs/', '/e-print/') + translation_dir = pj(ARXIV_CACHE_DIR, arxiv_id, 'e-print') + extract_dst = pj(ARXIV_CACHE_DIR, arxiv_id, 'extract') + os.makedirs(translation_dir, exist_ok=True) + + # <-------------- download arxiv source file -------------> + dst = pj(translation_dir, arxiv_id + '.tar') + if os.path.exists(dst): + yield from update_ui_lastest_msg("调用缓存", chatbot=chatbot, history=history) # 刷新界面 + else: + yield from update_ui_lastest_msg("开始下载", chatbot=chatbot, history=history) # 刷新界面 + proxies = get_conf('proxies') + r = requests.get(url_tar, proxies=proxies) + with open(dst, 'wb+') as f: + f.write(r.content) + # <-------------- extract file -------------> + yield from update_ui_lastest_msg("下载完成", chatbot=chatbot, history=history) # 刷新界面 + from toolbox import extract_archive + extract_archive(file_path=dst, dest_dir=extract_dst) + return extract_dst, arxiv_id + + +def pdf2tex_project(pdf_file_path): + # Mathpix API credentials + app_id, app_key = get_conf('MATHPIX_APPID', 'MATHPIX_APPKEY') + headers = {"app_id": app_id, "app_key": app_key} + + # Step 1: Send PDF file for processing + options = { + "conversion_formats": {"tex.zip": True}, + "math_inline_delimiters": ["$", "$"], + "rm_spaces": True + } + + response = requests.post(url="https://api.mathpix.com/v3/pdf", + headers=headers, + data={"options_json": json.dumps(options)}, + files={"file": open(pdf_file_path, "rb")}) + + if response.ok: + pdf_id = response.json()["pdf_id"] + print(f"PDF processing initiated. 
PDF ID: {pdf_id}") + + # Step 2: Check processing status + while True: + conversion_response = requests.get(f"https://api.mathpix.com/v3/pdf/{pdf_id}", headers=headers) + conversion_data = conversion_response.json() + + if conversion_data["status"] == "completed": + print("PDF processing completed.") + break + elif conversion_data["status"] == "error": + print("Error occurred during processing.") + else: + print(f"Processing status: {conversion_data['status']}") + time.sleep(5) # wait for a few seconds before checking again + + # Step 3: Save results to local files + output_dir = os.path.join(os.path.dirname(pdf_file_path), 'mathpix_output') + if not os.path.exists(output_dir): + os.makedirs(output_dir) + + url = f"https://api.mathpix.com/v3/pdf/{pdf_id}.tex" + response = requests.get(url, headers=headers) + file_name_wo_dot = '_'.join(os.path.basename(pdf_file_path).split('.')[:-1]) + output_name = f"{file_name_wo_dot}.tex.zip" + output_path = os.path.join(output_dir, output_name) + with open(output_path, "wb") as output_file: + output_file.write(response.content) + print(f"tex.zip file saved at: {output_path}") + + import zipfile + unzip_dir = os.path.join(output_dir, file_name_wo_dot) + with zipfile.ZipFile(output_path, 'r') as zip_ref: + zip_ref.extractall(unzip_dir) + + return unzip_dir + + else: + print(f"Error sending PDF for processing. Status code: {response.status_code}") + return None + + +# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= 插件主程序1 =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= + + +@CatchException +def Latex英文纠错加PDF对比(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): + # <-------------- information about this plugin -------------> + chatbot.append(["函数插件功能?", + "对整个Latex项目进行纠错, 用latex编译为PDF对修正处做高亮。函数插件贡献者: Binary-Husky。注意事项: 目前仅支持GPT3.5/GPT4,其他模型转化效果未知。目前对机器学习类文献转化效果最好,其他类型文献转化效果未知。仅在Windows系统进行了测试,其他操作系统表现未知。"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + + # <-------------- more requirements -------------> + if ("advanced_arg" in plugin_kwargs) and (plugin_kwargs["advanced_arg"] == ""): plugin_kwargs.pop("advanced_arg") + more_req = plugin_kwargs.get("advanced_arg", "") + _switch_prompt_ = partial(switch_prompt, more_requirement=more_req) + + # <-------------- check deps -------------> + try: + import glob, os, time, subprocess + subprocess.Popen(['pdflatex', '-version']) + from .latex_fns.latex_actions import Latex精细分解与转化, 编译Latex + except Exception as e: + chatbot.append([f"解析项目: {txt}", + f"尝试执行Latex指令失败。Latex没有安装, 或者不在环境变量PATH中。安装方法https://tug.org/texlive/。报错信息\n\n```\n\n{trimmed_format_exc()}\n\n```\n\n"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # <-------------- clear history and read input -------------> + history = [] + if os.path.exists(txt): + project_folder = txt + else: + if txt == "": txt = '空空如也的输入栏' + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.tex', recursive=True)] + if len(file_manifest) == 0: + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.tex文件: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # <-------------- if is a zip/tar file -------------> + project_folder = desend_to_extracted_folder_if_exist(project_folder) + + # <-------------- move latex project away from temp folder -------------> + project_folder = 
move_project(project_folder, arxiv_id=None) + + # <-------------- if merge_translate_zh is already generated, skip gpt req -------------> + if not os.path.exists(project_folder + '/merge_proofread_en.tex'): + yield from Latex精细分解与转化(file_manifest, project_folder, llm_kwargs, plugin_kwargs, + chatbot, history, system_prompt, mode='proofread_en', + switch_prompt=_switch_prompt_) + + # <-------------- compile PDF -------------> + success = yield from 编译Latex(chatbot, history, main_file_original='merge', + main_file_modified='merge_proofread_en', + work_folder_original=project_folder, work_folder_modified=project_folder, + work_folder=project_folder) + + # <-------------- zip PDF -------------> + zip_res = zip_result(project_folder) + if success: + chatbot.append((f"成功啦", '请查收结果(压缩包)...')) + yield from update_ui(chatbot=chatbot, history=history); + time.sleep(1) # 刷新界面 + promote_file_to_downloadzone(file=zip_res, chatbot=chatbot) + else: + chatbot.append((f"失败了", + '虽然PDF生成失败了, 但请查收结果(压缩包), 内含已经翻译的Tex文档, 也是可读的, 您可以到Github Issue区, 用该压缩包+对话历史存档进行反馈 ...')) + yield from update_ui(chatbot=chatbot, history=history); + time.sleep(1) # 刷新界面 + promote_file_to_downloadzone(file=zip_res, chatbot=chatbot) + + # <-------------- we are done -------------> + return success + + +# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= 插件主程序2 =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= + +@CatchException +def Latex翻译中文并重新编译PDF(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): + # <-------------- information about this plugin -------------> + chatbot.append([ + "函数插件功能?", + "对整个Latex项目进行翻译, 生成中文PDF。函数插件贡献者: Binary-Husky。注意事项: 此插件Windows支持最佳,Linux下必须使用Docker安装,详见项目主README.md。目前仅支持GPT3.5/GPT4,其他模型转化效果未知。目前对机器学习类文献转化效果最好,其他类型文献转化效果未知。"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + + # <-------------- more requirements -------------> + if ("advanced_arg" in plugin_kwargs) and (plugin_kwargs["advanced_arg"] == ""): plugin_kwargs.pop("advanced_arg") + more_req = plugin_kwargs.get("advanced_arg", "") + no_cache = more_req.startswith("--no-cache") + if no_cache: more_req.lstrip("--no-cache") + allow_cache = not no_cache + _switch_prompt_ = partial(switch_prompt, more_requirement=more_req) + + # <-------------- check deps -------------> + try: + import glob, os, time, subprocess + subprocess.Popen(['pdflatex', '-version']) + from .latex_fns.latex_actions import Latex精细分解与转化, 编译Latex + except Exception as e: + chatbot.append([f"解析项目: {txt}", + f"尝试执行Latex指令失败。Latex没有安装, 或者不在环境变量PATH中。安装方法https://tug.org/texlive/。报错信息\n\n```\n\n{trimmed_format_exc()}\n\n```\n\n"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # <-------------- clear history and read input -------------> + history = [] + try: + txt, arxiv_id = yield from arxiv_download(chatbot, history, txt, allow_cache) + except tarfile.ReadError as e: + yield from update_ui_lastest_msg( + "无法自动下载该论文的Latex源码,请前往arxiv打开此论文下载页面,点other Formats,然后download source手动下载latex源码包。接下来调用本地Latex翻译插件即可。", + chatbot=chatbot, history=history) + return + + if txt.endswith('.pdf'): + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"发现已经存在翻译好的PDF文档") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + if os.path.exists(txt): + project_folder = txt + else: + if txt == "": txt = '空空如也的输入栏' + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无法处理: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + file_manifest = [f for f in 
glob.glob(f'{project_folder}/**/*.tex', recursive=True)] + if len(file_manifest) == 0: + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.tex文件: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # <-------------- if is a zip/tar file -------------> + project_folder = desend_to_extracted_folder_if_exist(project_folder) + + # <-------------- move latex project away from temp folder -------------> + project_folder = move_project(project_folder, arxiv_id) + + # <-------------- if merge_translate_zh is already generated, skip gpt req -------------> + if not os.path.exists(project_folder + '/merge_translate_zh.tex'): + yield from Latex精细分解与转化(file_manifest, project_folder, llm_kwargs, plugin_kwargs, + chatbot, history, system_prompt, mode='translate_zh', + switch_prompt=_switch_prompt_) + + # <-------------- compile PDF -------------> + success = yield from 编译Latex(chatbot, history, main_file_original='merge', + main_file_modified='merge_translate_zh', mode='translate_zh', + work_folder_original=project_folder, work_folder_modified=project_folder, + work_folder=project_folder) + + # <-------------- zip PDF -------------> + zip_res = zip_result(project_folder) + if success: + chatbot.append((f"成功啦", '请查收结果(压缩包)...')) + yield from update_ui(chatbot=chatbot, history=history); + time.sleep(1) # 刷新界面 + promote_file_to_downloadzone(file=zip_res, chatbot=chatbot) + else: + chatbot.append((f"失败了", + '虽然PDF生成失败了, 但请查收结果(压缩包), 内含已经翻译的Tex文档, 您可以到Github Issue区, 用该压缩包进行反馈。如系统是Linux,请检查系统字体(见Github wiki) ...')) + yield from update_ui(chatbot=chatbot, history=history); + time.sleep(1) # 刷新界面 + promote_file_to_downloadzone(file=zip_res, chatbot=chatbot) + + # <-------------- we are done -------------> + return success + + +# =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- 插件主程序3 =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= + +@CatchException +def PDF翻译中文并重新编译PDF(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): + # <-------------- information about this plugin -------------> + chatbot.append([ + "函数插件功能?", + "将PDF转换为Latex项目,翻译为中文后重新编译为PDF。函数插件贡献者: Marroh。注意事项: 此插件Windows支持最佳,Linux下必须使用Docker安装,详见项目主README.md。目前仅支持GPT3.5/GPT4,其他模型转化效果未知。目前对机器学习类文献转化效果最好,其他类型文献转化效果未知。"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + + # <-------------- more requirements -------------> + if ("advanced_arg" in plugin_kwargs) and (plugin_kwargs["advanced_arg"] == ""): plugin_kwargs.pop("advanced_arg") + more_req = plugin_kwargs.get("advanced_arg", "") + no_cache = more_req.startswith("--no-cache") + if no_cache: more_req.lstrip("--no-cache") + allow_cache = not no_cache + _switch_prompt_ = partial(switch_prompt, more_requirement=more_req) + + # <-------------- check deps -------------> + try: + import glob, os, time, subprocess + subprocess.Popen(['pdflatex', '-version']) + from .latex_fns.latex_actions import Latex精细分解与转化, 编译Latex + except Exception as e: + chatbot.append([f"解析项目: {txt}", + f"尝试执行Latex指令失败。Latex没有安装, 或者不在环境变量PATH中。安装方法https://tug.org/texlive/。报错信息\n\n```\n\n{trimmed_format_exc()}\n\n```\n\n"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # <-------------- clear history and read input -------------> + if os.path.exists(txt): + project_folder = txt + else: + if txt == "": txt = '空空如也的输入栏' + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无法处理: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + file_manifest = [f for f in 
glob.glob(f'{project_folder}/**/*.pdf', recursive=True)] + if len(file_manifest) == 0: + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.pdf文件: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + if len(file_manifest) != 1: + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"不支持同时处理多个pdf文件: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + app_id, app_key = get_conf('MATHPIX_APPID', 'MATHPIX_APPKEY') + if len(app_id) == 0 or len(app_key) == 0: + report_exception(chatbot, history, a="缺失 MATHPIX_APPID 和 MATHPIX_APPKEY。", b=f"请配置 MATHPIX_APPID 和 MATHPIX_APPKEY") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # <-------------- convert pdf into tex -------------> + project_folder = pdf2tex_project(file_manifest[0]) + + # Translate English Latex to Chinese Latex, and compile it + file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.tex', recursive=True)] + if len(file_manifest) == 0: + report_exception(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.tex文件: {txt}") + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # <-------------- if is a zip/tar file -------------> + project_folder = desend_to_extracted_folder_if_exist(project_folder) + + # <-------------- move latex project away from temp folder -------------> + project_folder = move_project(project_folder) + + # <-------------- if merge_translate_zh is already generated, skip gpt req -------------> + if not os.path.exists(project_folder + '/merge_translate_zh.tex'): + yield from Latex精细分解与转化(file_manifest, project_folder, llm_kwargs, plugin_kwargs, + chatbot, history, system_prompt, mode='translate_zh', + switch_prompt=_switch_prompt_) + + # <-------------- compile PDF -------------> + success = yield from 编译Latex(chatbot, history, main_file_original='merge', + main_file_modified='merge_translate_zh', mode='translate_zh', + work_folder_original=project_folder, work_folder_modified=project_folder, + work_folder=project_folder) + + # <-------------- zip PDF -------------> + zip_res = zip_result(project_folder) + if success: + chatbot.append((f"成功啦", '请查收结果(压缩包)...')) + yield from update_ui(chatbot=chatbot, history=history); + time.sleep(1) # 刷新界面 + promote_file_to_downloadzone(file=zip_res, chatbot=chatbot) + else: + chatbot.append((f"失败了", + '虽然PDF生成失败了, 但请查收结果(压缩包), 内含已经翻译的Tex文档, 您可以到Github Issue区, 用该压缩包进行反馈。如系统是Linux,请检查系统字体(见Github wiki) ...')) + yield from update_ui(chatbot=chatbot, history=history); + time.sleep(1) # 刷新界面 + promote_file_to_downloadzone(file=zip_res, chatbot=chatbot) + + # <-------------- we are done -------------> + return success diff --git a/crazy_functions/agent_fns/pipe.py b/crazy_functions/agent_fns/pipe.py index bb3bc78520d50b0a7995d0390208f69867c5b7e1..a292af810ef23992b036cc0697785268bc8a6250 100644 --- a/crazy_functions/agent_fns/pipe.py +++ b/crazy_functions/agent_fns/pipe.py @@ -9,7 +9,7 @@ class PipeCom: class PluginMultiprocessManager: - def __init__(self, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): + def __init__(self, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # ⭐ run in main process self.autogen_work_dir = os.path.join(get_log_folder("autogen"), gen_time_str()) self.previous_work_dir_files = {} @@ -18,7 +18,7 @@ class PluginMultiprocessManager: self.chatbot = chatbot self.history = history self.system_prompt = system_prompt - # self.web_port = web_port + # self.user_request = user_request self.alive = 
True self.use_docker = get_conf("AUTOGEN_USE_DOCKER") self.last_user_input = "" diff --git "a/crazy_functions/chatglm\345\276\256\350\260\203\345\267\245\345\205\267.py" "b/crazy_functions/chatglm\345\276\256\350\260\203\345\267\245\345\205\267.py" index 336d7cfc85ac159841758123fa057bd20a0bbbec..1b28228290f9ee7873787b420ed3fa742df427fa 100644 --- "a/crazy_functions/chatglm\345\276\256\350\260\203\345\267\245\345\205\267.py" +++ "b/crazy_functions/chatglm\345\276\256\350\260\203\345\267\245\345\205\267.py" @@ -32,7 +32,7 @@ def string_to_options(arguments): return args @CatchException -def 微调数据集生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 微调数据集生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -40,7 +40,7 @@ def 微调数据集生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, syst chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 chatbot.append(("这是什么功能?", "[Local Message] 微调数据集生成")) @@ -80,7 +80,7 @@ def 微调数据集生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, syst @CatchException -def 启动微调(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 启动微调(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -88,7 +88,7 @@ def 启动微调(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ import subprocess history = [] # 清空历史,以免输入溢出 diff --git a/crazy_functions/crazy_utils.py b/crazy_functions/crazy_utils.py index e7e625b9c505827fde3a7fd1341ac19273c7d147..9c8aeccb65f567aca3ce3c2bfda066bafd9c5cba 100644 --- a/crazy_functions/crazy_utils.py +++ b/crazy_functions/crazy_utils.py @@ -12,7 +12,7 @@ def input_clipping(inputs, history, max_token_limit): mode = 'input-and-history' # 当 输入部分的token占比 小于 全文的一半时,只裁剪历史 input_token_num = get_token_num(inputs) - if input_token_num < max_token_limit//2: + if input_token_num < max_token_limit//2: mode = 'only-history' max_token_limit = max_token_limit - input_token_num @@ -21,7 +21,7 @@ def input_clipping(inputs, history, max_token_limit): n_token = get_token_num('\n'.join(everything)) everything_token = [get_token_num(e) for e in everything] delta = max(everything_token) // 16 # 截断时的颗粒度 - + while n_token > max_token_limit: where = np.argmax(everything_token) encoded = enc.encode(everything[where], disallowed_special=()) @@ -38,9 +38,9 @@ def input_clipping(inputs, history, max_token_limit): return inputs, history def request_gpt_model_in_new_thread_with_ui_alive( - inputs, inputs_show_user, llm_kwargs, + inputs, inputs_show_user, llm_kwargs, chatbot, history, sys_prompt, refresh_interval=0.2, - handle_token_exceed=True, + handle_token_exceed=True, retry_times_at_unknown_error=2, ): """ @@ -77,7 +77,7 @@ def request_gpt_model_in_new_thread_with_ui_alive( exceeded_cnt = 0 while True: # watchdog error - if len(mutable) >= 2 and (time.time()-mutable[1]) > watch_dog_patience: + if len(mutable) >= 2 and (time.time()-mutable[1]) > watch_dog_patience: raise RuntimeError("检测到程序终止。") try: # 【第一种情况】:顺利完成 @@ -140,12 +140,12 @@ def can_multi_process(llm): if llm.startswith('api2d-'): return True if llm.startswith('azure-'): return True if 
llm.startswith('spark'): return True - if llm.startswith('zhipuai'): return True + if llm.startswith('zhipuai') or llm.startswith('glm-'): return True return False def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( - inputs_array, inputs_show_user_array, llm_kwargs, - chatbot, history_array, sys_prompt_array, + inputs_array, inputs_show_user_array, llm_kwargs, + chatbot, history_array, sys_prompt_array, refresh_interval=0.2, max_workers=-1, scroller_max_len=30, handle_token_exceed=True, show_user_at_complete=False, retry_times_at_unknown_error=2, @@ -189,7 +189,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( # 屏蔽掉 chatglm的多线程,可能会导致严重卡顿 if not can_multi_process(llm_kwargs['llm_model']): max_workers = 1 - + executor = ThreadPoolExecutor(max_workers=max_workers) n_frag = len(inputs_array) # 用户反馈 @@ -214,7 +214,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( try: # 【第一种情况】:顺利完成 gpt_say = predict_no_ui_long_connection( - inputs=inputs, llm_kwargs=llm_kwargs, history=history, + inputs=inputs, llm_kwargs=llm_kwargs, history=history, sys_prompt=sys_prompt, observe_window=mutable[index], console_slience=True ) mutable[index][2] = "已成功" @@ -246,7 +246,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( print(tb_str) gpt_say += f"[Local Message] 警告,线程{index}在执行过程中遭遇问题, Traceback:\n\n{tb_str}\n\n" if len(mutable[index][0]) > 0: gpt_say += "此线程失败前收到的回答:\n\n" + mutable[index][0] - if retry_op > 0: + if retry_op > 0: retry_op -= 1 wait = random.randint(5, 20) if ("Rate limit reached" in tb_str) or ("Too Many Requests" in tb_str): @@ -284,12 +284,11 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( # 在前端打印些好玩的东西 for thread_index, _ in enumerate(worker_done): print_something_really_funny = "[ ...`"+mutable[thread_index][0][-scroller_max_len:].\ - replace('\n', '').replace('`', '.').replace( - ' ', '.').replace('
<br/>', '.....').replace('$', '.')+"`... ]"
+                    replace('\n', '').replace('`', '.').replace(' ', '.').replace('<br/>
', '.....').replace('$', '.')+"`... ]" observe_win.append(print_something_really_funny) # 在前端打印些好玩的东西 - stat_str = ''.join([f'`{mutable[thread_index][2]}`: {obs}\n\n' - if not done else f'`{mutable[thread_index][2]}`\n\n' + stat_str = ''.join([f'`{mutable[thread_index][2]}`: {obs}\n\n' + if not done else f'`{mutable[thread_index][2]}`\n\n' for thread_index, done, obs in zip(range(len(worker_done)), worker_done, observe_win)]) # 在前端打印些好玩的东西 chatbot[-1] = [chatbot[-1][0], f'多线程操作已经开始,完成情况: \n\n{stat_str}' + ''.join(['.']*(cnt % 10+1))] @@ -303,7 +302,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( for inputs_show_user, f in zip(inputs_show_user_array, futures): gpt_res = f.result() gpt_response_collection.extend([inputs_show_user, gpt_res]) - + # 是否在结束时,在界面上显示结果 if show_user_at_complete: for inputs_show_user, f in zip(inputs_show_user_array, futures): @@ -353,7 +352,7 @@ def read_and_clean_pdf_text(fp): if wtf['size'] not in fsize_statiscs: fsize_statiscs[wtf['size']] = 0 fsize_statiscs[wtf['size']] += len(wtf['text']) return max(fsize_statiscs, key=fsize_statiscs.get) - + def ffsize_same(a,b): """ 提取字体大小是否近似相等 @@ -389,7 +388,7 @@ def read_and_clean_pdf_text(fp): if index == 0: page_one_meta = [" ".join(["".join([wtf['text'] for wtf in l['spans']]) for l in t['lines']]).replace( '- ', '') for t in text_areas['blocks'] if 'lines' in t] - + ############################## <第 2 步,获取正文主字体> ################################## try: fsize_statiscs = {} @@ -405,7 +404,7 @@ def read_and_clean_pdf_text(fp): mega_sec = [] sec = [] for index, line in enumerate(meta_line): - if index == 0: + if index == 0: sec.append(line[fc]) continue if REMOVE_FOOT_NOTE: @@ -502,12 +501,12 @@ def get_files_from_everything(txt, type): # type='.md' """ 这个函数是用来获取指定目录下所有指定类型(如.md)的文件,并且对于网络上的文件,也可以获取它。 下面是对每个参数和返回值的说明: - 参数 - - txt: 路径或网址,表示要搜索的文件或者文件夹路径或网络上的文件。 + 参数 + - txt: 路径或网址,表示要搜索的文件或者文件夹路径或网络上的文件。 - type: 字符串,表示要搜索的文件类型。默认是.md。 - 返回值 - - success: 布尔值,表示函数是否成功执行。 - - file_manifest: 文件路径列表,里面包含以指定类型为后缀名的所有文件的绝对路径。 + 返回值 + - success: 布尔值,表示函数是否成功执行。 + - file_manifest: 文件路径列表,里面包含以指定类型为后缀名的所有文件的绝对路径。 - project_folder: 字符串,表示文件所在的文件夹路径。如果是网络上的文件,就是临时文件夹的路径。 该函数详细注释已添加,请确认是否满足您的需要。 """ @@ -571,7 +570,7 @@ class nougat_interface(): def NOUGAT_parse_pdf(self, fp, chatbot, history): from toolbox import update_ui_lastest_msg - yield from update_ui_lastest_msg("正在解析论文, 请稍候。进度:正在排队, 等待线程锁...", + yield from update_ui_lastest_msg("正在解析论文, 请稍候。进度:正在排队, 等待线程锁...", chatbot=chatbot, history=history, delay=0) self.threadLock.acquire() import glob, threading, os @@ -579,7 +578,7 @@ class nougat_interface(): dst = os.path.join(get_log_folder(plugin_name='nougat'), gen_time_str()) os.makedirs(dst) - yield from update_ui_lastest_msg("正在解析论文, 请稍候。进度:正在加载NOUGAT... (提示:首次运行需要花费较长时间下载NOUGAT参数)", + yield from update_ui_lastest_msg("正在解析论文, 请稍候。进度:正在加载NOUGAT... 
(提示:首次运行需要花费较长时间下载NOUGAT参数)", chatbot=chatbot, history=history, delay=0) self.nougat_with_timeout(f'nougat --out "{os.path.abspath(dst)}" "{os.path.abspath(fp)}"', os.getcwd(), timeout=3600) res = glob.glob(os.path.join(dst,'*.mmd')) diff --git a/crazy_functions/diagram_fns/file_tree.py b/crazy_functions/diagram_fns/file_tree.py new file mode 100644 index 0000000000000000000000000000000000000000..fa7e2e4c4bf56329b0d6c8beb8c5de2cbdbce8b0 --- /dev/null +++ b/crazy_functions/diagram_fns/file_tree.py @@ -0,0 +1,122 @@ +import os +from textwrap import indent + +class FileNode: + def __init__(self, name): + self.name = name + self.children = [] + self.is_leaf = False + self.level = 0 + self.parenting_ship = [] + self.comment = "" + self.comment_maxlen_show = 50 + + @staticmethod + def add_linebreaks_at_spaces(string, interval=10): + return '\n'.join(string[i:i+interval] for i in range(0, len(string), interval)) + + def sanitize_comment(self, comment): + if len(comment) > self.comment_maxlen_show: suf = '...' + else: suf = '' + comment = comment[:self.comment_maxlen_show] + comment = comment.replace('\"', '').replace('`', '').replace('\n', '').replace('`', '').replace('$', '') + comment = self.add_linebreaks_at_spaces(comment, 10) + return '`' + comment + suf + '`' + + def add_file(self, file_path, file_comment): + directory_names, file_name = os.path.split(file_path) + current_node = self + level = 1 + if directory_names == "": + new_node = FileNode(file_name) + current_node.children.append(new_node) + new_node.is_leaf = True + new_node.comment = self.sanitize_comment(file_comment) + new_node.level = level + current_node = new_node + else: + dnamesplit = directory_names.split(os.sep) + for i, directory_name in enumerate(dnamesplit): + found_child = False + level += 1 + for child in current_node.children: + if child.name == directory_name: + current_node = child + found_child = True + break + if not found_child: + new_node = FileNode(directory_name) + current_node.children.append(new_node) + new_node.level = level - 1 + current_node = new_node + term = FileNode(file_name) + term.level = level + term.comment = self.sanitize_comment(file_comment) + term.is_leaf = True + current_node.children.append(term) + + def print_files_recursively(self, level=0, code="R0"): + print(' '*level + self.name + ' ' + str(self.is_leaf) + ' ' + str(self.level)) + for j, child in enumerate(self.children): + child.print_files_recursively(level=level+1, code=code+str(j)) + self.parenting_ship.extend(child.parenting_ship) + p1 = f"""{code}[\"🗎{self.name}\"]""" if self.is_leaf else f"""{code}[[\"📁{self.name}\"]]""" + p2 = """ --> """ + p3 = f"""{code+str(j)}[\"🗎{child.name}\"]""" if child.is_leaf else f"""{code+str(j)}[[\"📁{child.name}\"]]""" + edge_code = p1 + p2 + p3 + if edge_code in self.parenting_ship: + continue + self.parenting_ship.append(edge_code) + if self.comment != "": + pc1 = f"""{code}[\"🗎{self.name}\"]""" if self.is_leaf else f"""{code}[[\"📁{self.name}\"]]""" + pc2 = f""" -.-x """ + pc3 = f"""C{code}[\"{self.comment}\"]:::Comment""" + edge_code = pc1 + pc2 + pc3 + self.parenting_ship.append(edge_code) + + +MERMAID_TEMPLATE = r""" +```mermaid +flowchart LR + %% 一个特殊标记,用于在生成mermaid图表时隐藏代码块 + classDef Comment stroke-dasharray: 5 5 + subgraph {graph_name} +{relationship} + end +``` +""" + +def build_file_tree_mermaid_diagram(file_manifest, file_comments, graph_name): + # Create the root node + file_tree_struct = FileNode("root") + # Build the tree structure + for file_path, file_comment in zip(file_manifest, 
file_comments): + file_tree_struct.add_file(file_path, file_comment) + file_tree_struct.print_files_recursively() + cc = "\n".join(file_tree_struct.parenting_ship) + ccc = indent(cc, prefix=" "*8) + return MERMAID_TEMPLATE.format(graph_name=graph_name, relationship=ccc) + +if __name__ == "__main__": + # File manifest + file_manifest = [ + "cradle_void_terminal.ipynb", + "tests/test_utils.py", + "tests/test_plugins.py", + "tests/test_llms.py", + "config.py", + "build/ChatGLM-6b-onnx-u8s8/chatglm-6b-int8-onnx-merged/model_weights_0.bin", + "crazy_functions/latex_fns/latex_actions.py", + "crazy_functions/latex_fns/latex_toolbox.py" + ] + file_comments = [ + "根据位置和名称,可能是一个模块的初始化文件根据位置和名称,可能是一个模块的初始化文件根据位置和名称,可能是一个模块的初始化文件", + "包含一些用于文本处理和模型微调的函数和装饰器包含一些用于文本处理和模型微调的函数和装饰器包含一些用于文本处理和模型微调的函数和装饰器", + "用于构建HTML报告的类和方法用于构建HTML报告的类和方法用于构建HTML报告的类和方法", + "包含了用于文本切分的函数,以及处理PDF文件的示例代码包含了用于文本切分的函数,以及处理PDF文件的示例代码包含了用于文本切分的函数,以及处理PDF文件的示例代码", + "用于解析和翻译PDF文件的功能和相关辅助函数用于解析和翻译PDF文件的功能和相关辅助函数用于解析和翻译PDF文件的功能和相关辅助函数", + "是一个包的初始化文件,用于初始化包的属性和导入模块是一个包的初始化文件,用于初始化包的属性和导入模块是一个包的初始化文件,用于初始化包的属性和导入模块", + "用于加载和分割文件中的文本的通用文件加载器用于加载和分割文件中的文本的通用文件加载器用于加载和分割文件中的文本的通用文件加载器", + "包含了用于构建和管理向量数据库的函数和类包含了用于构建和管理向量数据库的函数和类包含了用于构建和管理向量数据库的函数和类", + ] + print(build_file_tree_mermaid_diagram(file_manifest, file_comments, "项目文件树")) \ No newline at end of file diff --git a/crazy_functions/pdf_fns/parse_word.py b/crazy_functions/pdf_fns/parse_word.py new file mode 100644 index 0000000000000000000000000000000000000000..64d07dcd48156162eea40b8b9fd3c105ccbf1af2 --- /dev/null +++ b/crazy_functions/pdf_fns/parse_word.py @@ -0,0 +1,85 @@ +from crazy_functions.crazy_utils import read_and_clean_pdf_text, get_files_from_everything +import os +import re +def extract_text_from_files(txt, chatbot, history): + """ + 查找pdf/md/word并获取文本内容并返回状态以及文本 + + 输入参数 Args: + chatbot: chatbot inputs and outputs (用户界面对话窗口句柄,用于数据流可视化) + history (list): List of chat history (历史,对话历史列表) + + 输出 Returns: + 文件是否存在(bool) + final_result(list):文本内容 + page_one(list):第一页内容/摘要 + file_manifest(list):文件路径 + excption(string):需要用户手动处理的信息,如没出错则保持为空 + """ + + final_result = [] + page_one = [] + file_manifest = [] + excption = "" + + if txt == "": + final_result.append(txt) + return False, final_result, page_one, file_manifest, excption #如输入区内容不是文件则直接返回输入区内容 + + #查找输入区内容中的文件 + file_pdf,pdf_manifest,folder_pdf = get_files_from_everything(txt, '.pdf') + file_md,md_manifest,folder_md = get_files_from_everything(txt, '.md') + file_word,word_manifest,folder_word = get_files_from_everything(txt, '.docx') + file_doc,doc_manifest,folder_doc = get_files_from_everything(txt, '.doc') + + if file_doc: + excption = "word" + return False, final_result, page_one, file_manifest, excption + + file_num = len(pdf_manifest) + len(md_manifest) + len(word_manifest) + if file_num == 0: + final_result.append(txt) + return False, final_result, page_one, file_manifest, excption #如输入区内容不是文件则直接返回输入区内容 + + if file_pdf: + try: # 尝试导入依赖,如果缺少依赖,则给出安装建议 + import fitz + except: + excption = "pdf" + return False, final_result, page_one, file_manifest, excption + for index, fp in enumerate(pdf_manifest): + file_content, pdf_one = read_and_clean_pdf_text(fp) # (尝试)按照章节切割PDF + file_content = file_content.encode('utf-8', 'ignore').decode() # avoid reading non-utf8 chars + pdf_one = str(pdf_one).encode('utf-8', 'ignore').decode() # avoid reading non-utf8 chars + final_result.append(file_content) + page_one.append(pdf_one) + file_manifest.append(os.path.relpath(fp, folder_pdf)) + + if file_md: + for index, fp in 
enumerate(md_manifest): + with open(fp, 'r', encoding='utf-8', errors='replace') as f: + file_content = f.read() + file_content = file_content.encode('utf-8', 'ignore').decode() + headers = re.findall(r'^#\s(.*)$', file_content, re.MULTILINE) #接下来提取md中的一级/二级标题作为摘要 + if len(headers) > 0: + page_one.append("\n".join(headers)) #合并所有的标题,以换行符分割 + else: + page_one.append("") + final_result.append(file_content) + file_manifest.append(os.path.relpath(fp, folder_md)) + + if file_word: + try: # 尝试导入依赖,如果缺少依赖,则给出安装建议 + from docx import Document + except: + excption = "word_pip" + return False, final_result, page_one, file_manifest, excption + for index, fp in enumerate(word_manifest): + doc = Document(fp) + file_content = '\n'.join([p.text for p in doc.paragraphs]) + file_content = file_content.encode('utf-8', 'ignore').decode() + page_one.append(file_content[:200]) + final_result.append(file_content) + file_manifest.append(os.path.relpath(fp, folder_word)) + + return True, final_result, page_one, file_manifest, excption \ No newline at end of file diff --git "a/crazy_functions/\344\270\213\350\275\275arxiv\350\256\272\346\226\207\347\277\273\350\257\221\346\221\230\350\246\201.py" "b/crazy_functions/\344\270\213\350\275\275arxiv\350\256\272\346\226\207\347\277\273\350\257\221\346\221\230\350\246\201.py" index 1e0fe63052b64503f219370bcb51b8aeba18f155..c368b7d66bced9c4ffde805f1f87d367d4c301ee 100644 --- "a/crazy_functions/\344\270\213\350\275\275arxiv\350\256\272\346\226\207\347\277\273\350\257\221\346\221\230\350\246\201.py" +++ "b/crazy_functions/\344\270\213\350\275\275arxiv\350\256\272\346\226\207\347\277\273\350\257\221\346\221\230\350\246\201.py" @@ -130,7 +130,7 @@ def get_name(_url_): @CatchException -def 下载arxiv论文并翻译摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 下载arxiv论文并翻译摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): CRAZY_FUNCTION_INFO = "下载arxiv论文并翻译摘要,函数插件作者[binary-husky]。正在提取摘要并下载PDF文档……" import glob diff --git "a/crazy_functions/\344\272\222\345\212\250\345\260\217\346\270\270\346\210\217.py" "b/crazy_functions/\344\272\222\345\212\250\345\260\217\346\270\270\346\210\217.py" index f3786c31ca99b4c519e84426f4a24c151956dbb5..131e9c91954d164f96b1826869eac6477fe4de5f 100644 --- "a/crazy_functions/\344\272\222\345\212\250\345\260\217\346\270\270\346\210\217.py" +++ "b/crazy_functions/\344\272\222\345\212\250\345\260\217\346\270\270\346\210\217.py" @@ -5,7 +5,7 @@ from request_llms.bridge_all import predict_no_ui_long_connection from crazy_functions.game_fns.game_utils import get_code_block, is_same_thing @CatchException -def 随机小游戏(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 随机小游戏(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): from crazy_functions.game_fns.game_interactive_story import MiniGame_ResumeStory # 清空历史 history = [] @@ -23,7 +23,7 @@ def 随机小游戏(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_ @CatchException -def 随机小游戏1(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 随机小游戏1(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): from crazy_functions.game_fns.game_ascii_art import MiniGame_ASCII_Art # 清空历史 history = [] diff --git "a/crazy_functions/\344\272\244\344\272\222\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" 
"b/crazy_functions/\344\272\244\344\272\222\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" index d57fc2b0f0fb604be1dc19f789815eb7833bef7f..811267a321e34896257b612f6797f095625bf962 100644 --- "a/crazy_functions/\344\272\244\344\272\222\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" +++ "b/crazy_functions/\344\272\244\344\272\222\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" @@ -3,7 +3,7 @@ from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive @CatchException -def 交互功能模板函数(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 交互功能模板函数(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数, 如温度和top_p等, 一般原样传递下去就行 @@ -11,7 +11,7 @@ def 交互功能模板函数(txt, llm_kwargs, plugin_kwargs, chatbot, history, s chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 chatbot.append(("这是什么功能?", "交互功能函数模板。在执行完成之后, 可以将自身的状态存储到cookie中, 等待用户的再次调用。")) diff --git "a/crazy_functions/\345\207\275\346\225\260\345\212\250\346\200\201\347\224\237\346\210\220.py" "b/crazy_functions/\345\207\275\346\225\260\345\212\250\346\200\201\347\224\237\346\210\220.py" index d16ef88d969558a726d9e16e26f8de0fe1b5067c..d20d0cf579d2357306e040570f708d4f26a8912a 100644 --- "a/crazy_functions/\345\207\275\346\225\260\345\212\250\346\200\201\347\224\237\346\210\220.py" +++ "b/crazy_functions/\345\207\275\346\225\260\345\212\250\346\200\201\347\224\237\346\210\220.py" @@ -139,7 +139,7 @@ def get_recent_file_prompt_support(chatbot): return path @CatchException -def 函数动态生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 函数动态生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -147,7 +147,7 @@ def 函数动态生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_ chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ # 清空历史 diff --git "a/crazy_functions/\345\221\275\344\273\244\350\241\214\345\212\251\346\211\213.py" "b/crazy_functions/\345\221\275\344\273\244\350\241\214\345\212\251\346\211\213.py" index 4cbc08471822f51e2b4bc01c8ebfad25c1032f49..286952445a1f4f262e47fec82a3f61243f85b5e5 100644 --- "a/crazy_functions/\345\221\275\344\273\244\350\241\214\345\212\251\346\211\213.py" +++ "b/crazy_functions/\345\221\275\344\273\244\350\241\214\345\212\251\346\211\213.py" @@ -4,7 +4,7 @@ from .crazy_utils import input_clipping import copy, json @CatchException -def 命令行助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 命令行助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本, 例如需要翻译的一段话, 再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数, 如温度和top_p等, 一般原样传递下去就行 @@ -12,7 +12,7 @@ def 命令行助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_pro chatbot 聊天显示框的句柄, 用于显示给用户 history 聊天历史, 前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ # 清空历史, 以免输入溢出 history = [] diff --git "a/crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py" "b/crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py" index 9751e1647cb2dbea3c88848f937135a05e8c5cb4..62f36626143c29a6c31f20a0067e2775ad870ce6 100644 --- 
"a/crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py" +++ "b/crazy_functions/\345\233\276\347\211\207\347\224\237\346\210\220.py" @@ -93,7 +93,7 @@ def edit_image(llm_kwargs, prompt, image_path, resolution="1024x1024", model="da @CatchException -def 图片生成_DALLE2(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 图片生成_DALLE2(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -101,7 +101,7 @@ def 图片生成_DALLE2(prompt, llm_kwargs, plugin_kwargs, chatbot, history, sys chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 if prompt.strip() == "": @@ -123,7 +123,7 @@ def 图片生成_DALLE2(prompt, llm_kwargs, plugin_kwargs, chatbot, history, sys @CatchException -def 图片生成_DALLE3(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 图片生成_DALLE3(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 if prompt.strip() == "": chatbot.append((prompt, "[Local Message] 图像生成提示为空白,请在“输入区”输入图像生成提示。")) @@ -209,7 +209,7 @@ class ImageEditState(GptAcademicState): return all([x['value'] is not None for x in self.req]) @CatchException -def 图片修改_DALLE2(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 图片修改_DALLE2(prompt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 尚未完成 history = [] # 清空历史 state = ImageEditState.get_state(chatbot, ImageEditState) diff --git "a/crazy_functions/\345\244\232\346\231\272\350\203\275\344\275\223.py" "b/crazy_functions/\345\244\232\346\231\272\350\203\275\344\275\223.py" index 0e76f5c690a812311905da9f3ef131790cae5ba3..4b16b8846d46c9589a001c159ef70d5dc475c8ad 100644 --- "a/crazy_functions/\345\244\232\346\231\272\350\203\275\344\275\223.py" +++ "b/crazy_functions/\345\244\232\346\231\272\350\203\275\344\275\223.py" @@ -21,7 +21,7 @@ def remove_model_prefix(llm): @CatchException -def 多智能体终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 多智能体终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -29,7 +29,7 @@ def 多智能体终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_ chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ # 检查当前的模型是否符合要求 supported_llms = [ @@ -89,7 +89,7 @@ def 多智能体终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_ history = [] chatbot.append(["正在启动: 多智能体终端", "插件动态生成, 执行开始, 作者 Microsoft & Binary-Husky."]) yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 - executor = AutoGenMath(llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port) + executor = AutoGenMath(llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request) persistent_class_multi_user_manager.set(persistent_key, executor) exit_reason = yield from executor.main_process_ui_control(txt, create_or_resume="create") diff --git "a/crazy_functions/\345\257\271\350\257\235\345\216\206\345\217\262\345\255\230\346\241\243.py" "b/crazy_functions/\345\257\271\350\257\235\345\216\206\345\217\262\345\255\230\346\241\243.py" index 3f603f9b828d066ab003457d001380f66fa974a9..6ffc072f634e7c786238963929011229c352d46b 100644 --- 
"a/crazy_functions/\345\257\271\350\257\235\345\216\206\345\217\262\345\255\230\346\241\243.py" +++ "b/crazy_functions/\345\257\271\350\257\235\345\216\206\345\217\262\345\255\230\346\241\243.py" @@ -69,7 +69,7 @@ def read_file_to_chat(chatbot, history, file_name): return chatbot, history @CatchException -def 对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -77,7 +77,7 @@ def 对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_ chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ chatbot.append(("保存当前对话", @@ -91,7 +91,7 @@ def hide_cwd(str): return str.replace(current_path, replace_path) @CatchException -def 载入对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 载入对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -99,7 +99,7 @@ def 载入对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, s chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ from .crazy_utils import get_files_from_everything success, file_manifest, _ = get_files_from_everything(txt, type='.html') @@ -126,7 +126,7 @@ def 载入对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, s return @CatchException -def 删除所有本地对话历史记录(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 删除所有本地对话历史记录(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -134,7 +134,7 @@ def 删除所有本地对话历史记录(txt, llm_kwargs, plugin_kwargs, chatbot chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ import glob, os diff --git "a/crazy_functions/\346\200\273\347\273\223word\346\226\207\346\241\243.py" "b/crazy_functions/\346\200\273\347\273\223word\346\226\207\346\241\243.py" index 01ee1e6b5a3f79cc18d80c13d7d74c0c912bf7f4..8793ea4490c07c36688fed0ae95bbbfcbb6f073b 100644 --- "a/crazy_functions/\346\200\273\347\273\223word\346\226\207\346\241\243.py" +++ "b/crazy_functions/\346\200\273\347\273\223word\346\226\207\346\241\243.py" @@ -79,7 +79,7 @@ def 解析docx(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot @CatchException -def 总结word文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 总结word文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): import glob, os # 基本信息:功能、贡献者 diff --git "a/crazy_functions/\346\211\271\351\207\217Markdown\347\277\273\350\257\221.py" "b/crazy_functions/\346\211\271\351\207\217Markdown\347\277\273\350\257\221.py" index 8665d6dfc36d27ba01ec825d5966e7d36f7aacbb..1d876d080a7272cc1a9108416d619afd1af11d86 100644 --- "a/crazy_functions/\346\211\271\351\207\217Markdown\347\277\273\350\257\221.py" +++ "b/crazy_functions/\346\211\271\351\207\217Markdown\347\277\273\350\257\221.py" @@ -153,7 +153,7 @@ def get_files_from_everything(txt, preference=''): @CatchException -def Markdown英译中(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Markdown英译中(txt, llm_kwargs, plugin_kwargs, chatbot, history, 
system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", @@ -193,7 +193,7 @@ def Markdown英译中(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_p @CatchException -def Markdown中译英(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Markdown中译英(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", @@ -226,7 +226,7 @@ def Markdown中译英(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_p @CatchException -def Markdown翻译指定语言(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def Markdown翻译指定语言(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", diff --git "a/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243.py" "b/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243.py" index e289c47b5cd37af3d1ed1f2356cbc9ef13fdbe35..54270ab982b55d59e5b8f7cb1e0f27209cc6c83d 100644 --- "a/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243.py" +++ "b/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243.py" @@ -101,7 +101,7 @@ do not have too much repetitive information, numerical values using the original @CatchException -def 批量总结PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 批量总结PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): import glob, os # 基本信息:功能、贡献者 diff --git "a/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243pdfminer.py" "b/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243pdfminer.py" index a729efaa40cc12197c566c1a6fa59c1804bd8707..181d51ce1b40ae0f25429fa71a056160202296c9 100644 --- "a/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243pdfminer.py" +++ "b/crazy_functions/\346\211\271\351\207\217\346\200\273\347\273\223PDF\346\226\207\346\241\243pdfminer.py" @@ -124,7 +124,7 @@ def 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbo @CatchException -def 批量总结PDF文档pdfminer(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 批量总结PDF文档pdfminer(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os diff --git "a/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_NOUGAT.py" "b/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_NOUGAT.py" index 97170d0eddac7b55c6ce1d1aa96c1ec16b98cc7d..7a18277778df2c1044eeeb5524ee6f3dff78f982 100644 --- "a/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_NOUGAT.py" +++ "b/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_NOUGAT.py" @@ -48,7 +48,7 @@ def markdown_to_dict(article_content): @CatchException -def 批量翻译PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 批量翻译PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): disable_auto_promotion(chatbot) # 基本信息:功能、贡献者 diff --git "a/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_\345\244\232\347\272\277\347\250\213.py" 
"b/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_\345\244\232\347\272\277\347\250\213.py" index a1f0f312c8170206d84a0ed2ba1c9a44216753a9..3d111629ae58ba1f1d8887e3dabbe18c45d746c4 100644 --- "a/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_\345\244\232\347\272\277\347\250\213.py" +++ "b/crazy_functions/\346\211\271\351\207\217\347\277\273\350\257\221PDF\346\226\207\346\241\243_\345\244\232\347\272\277\347\250\213.py" @@ -10,7 +10,7 @@ import os @CatchException -def 批量翻译PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 批量翻译PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): disable_auto_promotion(chatbot) # 基本信息:功能、贡献者 diff --git "a/crazy_functions/\346\225\260\345\255\246\345\212\250\347\224\273\347\224\237\346\210\220manim.py" "b/crazy_functions/\346\225\260\345\255\246\345\212\250\347\224\273\347\224\237\346\210\220manim.py" index 0acf5592c15096d46666b3ffbb41115a1db34429..9465cccd10580fb551e0866867262143ab1b2d4e 100644 --- "a/crazy_functions/\346\225\260\345\255\246\345\212\250\347\224\273\347\224\237\346\210\220manim.py" +++ "b/crazy_functions/\346\225\260\345\255\246\345\212\250\347\224\273\347\224\237\346\210\220manim.py" @@ -50,7 +50,7 @@ def get_code_block(reply): return matches[0].strip('python') # code block @CatchException -def 动画生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 动画生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -58,7 +58,7 @@ def 动画生成(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ # 清空历史,以免输入溢出 history = [] diff --git "a/crazy_functions/\347\220\206\350\247\243PDF\346\226\207\346\241\243\345\206\205\345\256\271.py" "b/crazy_functions/\347\220\206\350\247\243PDF\346\226\207\346\241\243\345\206\205\345\256\271.py" index 439d78eadb2a92b12ed3eb07675b0eebf1130413..732c82c08db5f33b43452a33d26347814e4be5db 100644 --- "a/crazy_functions/\347\220\206\350\247\243PDF\346\226\207\346\241\243\345\206\205\345\256\271.py" +++ "b/crazy_functions/\347\220\206\350\247\243PDF\346\226\207\346\241\243\345\206\205\345\256\271.py" @@ -63,7 +63,7 @@ def 解析PDF(file_name, llm_kwargs, plugin_kwargs, chatbot, history, system_pro @CatchException -def 理解PDF文档内容标准文件输入(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 理解PDF文档内容标准文件输入(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): import glob, os # 基本信息:功能、贡献者 diff --git "a/crazy_functions/\347\224\237\346\210\220\345\207\275\346\225\260\346\263\250\351\207\212.py" "b/crazy_functions/\347\224\237\346\210\220\345\207\275\346\225\260\346\263\250\351\207\212.py" index d71a568053e5db18c58a9beddf35d7981592d1c2..78aa45355616c2f51570a71f719ede0eff18b549 100644 --- "a/crazy_functions/\347\224\237\346\210\220\345\207\275\346\225\260\346\263\250\351\207\212.py" +++ "b/crazy_functions/\347\224\237\346\210\220\345\207\275\346\225\260\346\263\250\351\207\212.py" @@ -36,7 +36,7 @@ def 生成函数注释(file_manifest, project_folder, llm_kwargs, plugin_kwargs, @CatchException -def 批量生成函数注释(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 批量生成函数注释(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, 
user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): diff --git "a/crazy_functions/\347\224\237\346\210\220\345\244\232\347\247\215Mermaid\345\233\276\350\241\250.py" "b/crazy_functions/\347\224\237\346\210\220\345\244\232\347\247\215Mermaid\345\233\276\350\241\250.py" new file mode 100644 index 0000000000000000000000000000000000000000..dc01e9405343f39ee2e19648a0045b230a73f163 --- /dev/null +++ "b/crazy_functions/\347\224\237\346\210\220\345\244\232\347\247\215Mermaid\345\233\276\350\241\250.py" @@ -0,0 +1,296 @@ +from toolbox import CatchException, update_ui, report_exception +from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive +import datetime + +#以下是每类图表的PROMPT +SELECT_PROMPT = """ +“{subject}” +============= +以上是从文章中提取的摘要,将会使用这些摘要绘制图表。请你选择一个合适的图表类型: +1 流程图 +2 序列图 +3 类图 +4 饼图 +5 甘特图 +6 状态图 +7 实体关系图 +8 象限提示图 +不需要解释原因,仅需要输出单个不带任何标点符号的数字。 +""" +#没有思维导图!!!测试发现模型始终会优先选择思维导图 +#流程图 +PROMPT_1 = """ +请你给出围绕“{subject}”的逻辑关系图,使用mermaid语法,mermaid语法举例: +```mermaid +graph TD + P(编程) --> L1(Python) + P(编程) --> L2(C) + P(编程) --> L3(C++) + P(编程) --> L4(Javascipt) + P(编程) --> L5(PHP) +``` +""" +#序列图 +PROMPT_2 = """ +请你给出围绕“{subject}”的序列图,使用mermaid语法,mermaid语法举例: +```mermaid +sequenceDiagram + participant A as 用户 + participant B as 系统 + A->>B: 登录请求 + B->>A: 登录成功 + A->>B: 获取数据 + B->>A: 返回数据 +``` +""" +#类图 +PROMPT_3 = """ +请你给出围绕“{subject}”的类图,使用mermaid语法,mermaid语法举例: +```mermaid +classDiagram + Class01 <|-- AveryLongClass : Cool + Class03 *-- Class04 + Class05 o-- Class06 + Class07 .. Class08 + Class09 --> C2 : Where am i? + Class09 --* C3 + Class09 --|> Class07 + Class07 : equals() + Class07 : Object[] elementData + Class01 : size() + Class01 : int chimp + Class01 : int gorilla + Class08 <--> C2: Cool label +``` +""" +#饼图 +PROMPT_4 = """ +请你给出围绕“{subject}”的饼图,使用mermaid语法,mermaid语法举例: +```mermaid +pie title Pets adopted by volunteers + "狗" : 386 + "猫" : 85 + "兔子" : 15 +``` +""" +#甘特图 +PROMPT_5 = """ +请你给出围绕“{subject}”的甘特图,使用mermaid语法,mermaid语法举例: +```mermaid +gantt + title 项目开发流程 + dateFormat YYYY-MM-DD + section 设计 + 需求分析 :done, des1, 2024-01-06,2024-01-08 + 原型设计 :active, des2, 2024-01-09, 3d + UI设计 : des3, after des2, 5d + section 开发 + 前端开发 :2024-01-20, 10d + 后端开发 :2024-01-20, 10d +``` +""" +#状态图 +PROMPT_6 = """ +请你给出围绕“{subject}”的状态图,使用mermaid语法,mermaid语法举例: +```mermaid +stateDiagram-v2 + [*] --> Still + Still --> [*] + Still --> Moving + Moving --> Still + Moving --> Crash + Crash --> [*] +``` +""" +#实体关系图 +PROMPT_7 = """ +请你给出围绕“{subject}”的实体关系图,使用mermaid语法,mermaid语法举例: +```mermaid +erDiagram + CUSTOMER ||--o{ ORDER : places + ORDER ||--|{ LINE-ITEM : contains + CUSTOMER { + string name + string id + } + ORDER { + string orderNumber + date orderDate + string customerID + } + LINE-ITEM { + number quantity + string productID + } +``` +""" +#象限提示图 +PROMPT_8 = """ +请你给出围绕“{subject}”的象限图,使用mermaid语法,mermaid语法举例: +```mermaid +graph LR + A[Hard skill] --> B(Programming) + A[Hard skill] --> C(Design) + D[Soft skill] --> E(Coordination) + D[Soft skill] --> F(Communication) +``` +""" +#思维导图 +PROMPT_9 = """ +{subject} +========== +请给出上方内容的思维导图,充分考虑其之间的逻辑,使用mermaid语法,mermaid语法举例: +```mermaid +mindmap + root((mindmap)) + Origins + Long history + ::icon(fa fa-book) + Popularisation + British popular psychology author Tony Buzan + Research + On effectiveness
and features + On Automatic creation + Uses + Creative techniques + Strategic planning + Argument mapping + Tools + Pen and paper + Mermaid +``` +""" + +def 解析历史输入(history,llm_kwargs,file_manifest,chatbot,plugin_kwargs): + ############################## <第 0 步,切割输入> ################################## + # 借用PDF切割中的函数对文本进行切割 + TOKEN_LIMIT_PER_FRAGMENT = 2500 + txt = str(history).encode('utf-8', 'ignore').decode() # avoid reading non-utf8 chars + from crazy_functions.pdf_fns.breakdown_txt import breakdown_text_to_satisfy_token_limit + txt = breakdown_text_to_satisfy_token_limit(txt=txt, limit=TOKEN_LIMIT_PER_FRAGMENT, llm_model=llm_kwargs['llm_model']) + ############################## <第 1 步,迭代地历遍整个文章,提取精炼信息> ################################## + results = [] + MAX_WORD_TOTAL = 4096 + n_txt = len(txt) + last_iteration_result = "从以下文本中提取摘要。" + if n_txt >= 20: print('文章极长,不能达到预期效果') + for i in range(n_txt): + NUM_OF_WORD = MAX_WORD_TOTAL // n_txt + i_say = f"Read this section, recapitulate the content of this section with less than {NUM_OF_WORD} words in Chinese: {txt[i]}" + i_say_show_user = f"[{i+1}/{n_txt}] Read this section, recapitulate the content of this section with less than {NUM_OF_WORD} words: {txt[i][:200]} ...." + gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive(i_say, i_say_show_user, # i_say=真正给chatgpt的提问, i_say_show_user=给用户看的提问 + llm_kwargs, chatbot, + history=["The main content of the previous section is?", last_iteration_result], # 迭代上一次的结果 + sys_prompt="Extracts the main content from the text section where it is located for graphing purposes, answer me with Chinese." # 提示 + ) + results.append(gpt_say) + last_iteration_result = gpt_say + ############################## <第 2 步,根据整理的摘要选择图表类型> ################################## + if ("advanced_arg" in plugin_kwargs) and (plugin_kwargs["advanced_arg"] == ""): plugin_kwargs.pop("advanced_arg") + gpt_say = plugin_kwargs.get("advanced_arg", "") #将图表类型参数赋值为插件参数 + results_txt = '\n'.join(results) #合并摘要 + if gpt_say not in ['1','2','3','4','5','6','7','8','9']: #如插件参数不正确则使用对话模型判断 + i_say_show_user = f'接下来将判断适合的图表类型,如连续3次判断失败将会使用流程图进行绘制'; gpt_say = "[Local Message] 收到。" # 用户提示 + chatbot.append([i_say_show_user, gpt_say]); yield from update_ui(chatbot=chatbot, history=[]) # 更新UI + i_say = SELECT_PROMPT.format(subject=results_txt) + i_say_show_user = f'请判断适合使用的流程图类型,其中数字对应关系为:1-流程图,2-序列图,3-类图,4-饼图,5-甘特图,6-状态图,7-实体关系图,8-象限提示图。由于不管提供文本是什么,模型大概率认为"思维导图"最合适,因此思维导图仅能通过参数调用。' + for i in range(3): + gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive( + inputs=i_say, + inputs_show_user=i_say_show_user, + llm_kwargs=llm_kwargs, chatbot=chatbot, history=[], + sys_prompt="" + ) + if gpt_say in ['1','2','3','4','5','6','7','8','9']: #判断返回是否正确 + break + if gpt_say not in ['1','2','3','4','5','6','7','8','9']: + gpt_say = '1' + ############################## <第 3 步,根据选择的图表类型绘制图表> ################################## + if gpt_say == '1': + i_say = PROMPT_1.format(subject=results_txt) + elif gpt_say == '2': + i_say = PROMPT_2.format(subject=results_txt) + elif gpt_say == '3': + i_say = PROMPT_3.format(subject=results_txt) + elif gpt_say == '4': + i_say = PROMPT_4.format(subject=results_txt) + elif gpt_say == '5': + i_say = PROMPT_5.format(subject=results_txt) + elif gpt_say == '6': + i_say = PROMPT_6.format(subject=results_txt) + elif gpt_say == '7': + i_say = PROMPT_7.replace("{subject}", results_txt) #由于实体关系图用到了{}符号 + elif gpt_say == '8': + i_say = PROMPT_8.format(subject=results_txt) + elif gpt_say == '9': + i_say = 
PROMPT_9.format(subject=results_txt)
+    i_say_show_user = f'请根据判断结果绘制相应的图表。如需绘制思维导图请使用参数调用,同时过大的图表可能需要复制到在线编辑器中进行渲染。'
+    gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive(
+        inputs=i_say,
+        inputs_show_user=i_say_show_user,
+        llm_kwargs=llm_kwargs, chatbot=chatbot, history=[],
+        sys_prompt=""
+    )
+    history.append(gpt_say)
+    yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 界面更新
+
+@CatchException
+def 生成多种Mermaid图表(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
+    """
+    txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径
+    llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行
+    plugin_kwargs 插件模型的参数,用于灵活调整复杂功能的各种参数
+    chatbot 聊天显示框的句柄,用于显示给用户
+    history 聊天历史,前情提要
+    system_prompt 给gpt的静默提醒
+    web_port 当前软件运行的端口号
+    """
+    import os
+
+    # 基本信息:功能、贡献者
+    chatbot.append([
+        "函数插件功能?",
+        "根据当前聊天历史或指定的路径文件(文件内容优先)绘制多种mermaid图表,将会由对话模型首先判断适合的图表类型,随后绘制图表。\
+        \n您也可以使用插件参数指定绘制的图表类型,函数插件贡献者: Menghuan1918"])
+    yield from update_ui(chatbot=chatbot, history=history) # 刷新界面
+
+    if os.path.exists(txt): #如输入区无内容则直接解析历史记录
+        from crazy_functions.pdf_fns.parse_word import extract_text_from_files
+        file_exist, final_result, page_one, file_manifest, excption = extract_text_from_files(txt, chatbot, history)
+    else:
+        file_exist = False
+        excption = ""
+        file_manifest = []
+
+    if excption != "":
+        if excption == "word":
+            report_exception(chatbot, history,
+                a = f"解析项目: {txt}",
+                b = f"找到了.doc文件,但是该文件格式不被支持,请先转化为.docx格式。")
+
+        elif excption == "pdf":
+            report_exception(chatbot, history,
+                a = f"解析项目: {txt}",
+                b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pymupdf```。")
+
+        elif excption == "word_pip":
+            report_exception(chatbot, history,
+                a=f"解析项目: {txt}",
+                b=f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade python-docx pywin32```。")
+
+        yield from update_ui(chatbot=chatbot, history=history) # 刷新界面
+
+    else:
+        if not file_exist:
+            history.append(txt) #如输入区不是文件则将输入区内容加入历史记录
+            i_say_show_user = f'首先你从历史记录中提取摘要。'; gpt_say = "[Local Message] 收到。" # 用户提示
+            chatbot.append([i_say_show_user, gpt_say]); yield from update_ui(chatbot=chatbot, history=history) # 更新UI
+            yield from 解析历史输入(history,llm_kwargs,file_manifest,chatbot,plugin_kwargs)
+        else:
+            file_num = len(file_manifest)
+            for i in range(file_num): #依次处理文件
+                i_say_show_user = f"[{i+1}/{file_num}]处理文件{file_manifest[i]}"; gpt_say = "[Local Message] 收到。" # 用户提示
+                chatbot.append([i_say_show_user, gpt_say]); yield from update_ui(chatbot=chatbot, history=history) # 更新UI
+                history = [] #如输入区内容为文件则清空历史记录
+                history.append(final_result[i])
+                yield from 解析历史输入(history,llm_kwargs,file_manifest,chatbot,plugin_kwargs)
\ No newline at end of file
diff --git "a/crazy_functions/\347\237\245\350\257\206\345\272\223\351\227\256\347\255\224.py" "b/crazy_functions/\347\237\245\350\257\206\345\272\223\351\227\256\347\255\224.py"
index e1cd00cac94530d3716633862cc5e73edec6789d..f3c7c9e3fc95305bc470d122c89a12f1786c94ee 100644
--- "a/crazy_functions/\347\237\245\350\257\206\345\272\223\351\227\256\347\255\224.py"
+++ "b/crazy_functions/\347\237\245\350\257\206\345\272\223\351\227\256\347\255\224.py"
@@ -13,7 +13,7 @@ install_msg ="""
 """
 
 @CatchException
-def 知识库文件注入(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
+def 知识库文件注入(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request):
     """
     txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径
     llm_kwargs gpt模型参数, 如温度和top_p等, 一般原样传递下去就行
@@ -21,7 +21,7 @@ def 知识库文件注入(txt, llm_kwargs, plugin_kwargs, chatbot, history, syst
     chatbot 聊天显示框的句柄,用于显示给用户
     history 聊天历史,前情提要
system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 @@ -84,7 +84,7 @@ def 知识库文件注入(txt, llm_kwargs, plugin_kwargs, chatbot, history, syst yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 由于请求gpt需要一段时间,我们先及时地做一次界面更新 @CatchException -def 读取知识库作答(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port=-1): +def 读取知识库作答(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request=-1): # resolve deps try: # from zh_langchain import construct_vector_store diff --git "a/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT.py" "b/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT.py" index 6fa50ec2e39939371639076e508a0dce996e31ca..346492dbf5e07669856898b09b241f9c247c0cc6 100644 --- "a/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT.py" +++ "b/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT.py" @@ -55,7 +55,7 @@ def scrape_text(url, proxies) -> str: return text @CatchException -def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -63,7 +63,7 @@ def 连接网络回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, s chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 chatbot.append((f"请结合互联网信息回答以下问题:{txt}", diff --git "a/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT_bing\347\211\210.py" "b/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT_bing\347\211\210.py" index 009ebdce52dde5d0d85c6a31cf42fc62ae2a163e..eff6f8f9a97cd24abf504ba994c62d03b51c3346 100644 --- "a/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT_bing\347\211\210.py" +++ "b/crazy_functions/\350\201\224\347\275\221\347\232\204ChatGPT_bing\347\211\210.py" @@ -55,7 +55,7 @@ def scrape_text(url, proxies) -> str: return text @CatchException -def 连接bing搜索回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 连接bing搜索回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -63,7 +63,7 @@ def 连接bing搜索回答问题(txt, llm_kwargs, plugin_kwargs, chatbot, histor chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 chatbot.append((f"请结合互联网信息回答以下问题:{txt}", diff --git "a/crazy_functions/\350\231\232\347\251\272\347\273\210\347\253\257.py" "b/crazy_functions/\350\231\232\347\251\272\347\273\210\347\253\257.py" index 439e71ca3d2b815d7964bf792d157202a4e8aa9d..27f449969d7b6aee345040a25c60656356102d74 100644 --- "a/crazy_functions/\350\231\232\347\251\272\347\273\210\347\253\257.py" +++ "b/crazy_functions/\350\231\232\347\251\272\347\273\210\347\253\257.py" @@ -104,7 +104,7 @@ def analyze_intention_with_simple_rules(txt): @CatchException -def 虚空终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 虚空终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): disable_auto_promotion(chatbot=chatbot) # 获取当前虚空终端状态 state = VoidTerminalState.get_state(chatbot) @@ -121,7 +121,7 @@ def 虚空终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt state.set_state(chatbot=chatbot, 
key='has_provided_explaination', value=True) state.unlock_plugin(chatbot=chatbot) yield from update_ui(chatbot=chatbot, history=history) - yield from 虚空终端主路由(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port) + yield from 虚空终端主路由(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request) return else: # 如果意图模糊,提示 @@ -133,7 +133,7 @@ def 虚空终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt -def 虚空终端主路由(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 虚空终端主路由(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] chatbot.append(("虚空终端状态: ", f"正在执行任务: {txt}")) yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 diff --git "a/crazy_functions/\350\247\243\346\236\220JupyterNotebook.py" "b/crazy_functions/\350\247\243\346\236\220JupyterNotebook.py" index 3c2b57831765d382b89979ac9a1818fd614a165e..2f2c088378822284c50487ea17473bde47214f58 100644 --- "a/crazy_functions/\350\247\243\346\236\220JupyterNotebook.py" +++ "b/crazy_functions/\350\247\243\346\236\220JupyterNotebook.py" @@ -12,6 +12,12 @@ class PaperFileGroup(): self.sp_file_index = [] self.sp_file_tag = [] + # count_token + from request_llms.bridge_all import model_info + enc = model_info["gpt-3.5-turbo"]['tokenizer'] + def get_token_num(txt): return len(enc.encode(txt, disallowed_special=())) + self.get_token_num = get_token_num + def run_file_split(self, max_token_limit=1900): """ 将长文本分离开来 @@ -54,7 +60,7 @@ def parseNotebook(filename, enable_markdown=1): Code += f"This is {idx+1}th code block: \n" Code += code+"\n" - return Code + return Code def ipynb解释(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): @@ -109,7 +115,7 @@ def ipynb解释(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbo yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 @CatchException -def 解析ipynb文件(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析ipynb文件(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): chatbot.append([ "函数插件功能?", "对IPynb文件进行解析。Contributor: codycjy."]) diff --git "a/crazy_functions/\350\247\243\346\236\220\351\241\271\347\233\256\346\272\220\344\273\243\347\240\201.py" "b/crazy_functions/\350\247\243\346\236\220\351\241\271\347\233\256\346\272\220\344\273\243\347\240\201.py" index e319d5a87435a1cbb410dffe823787a887e924b5..dfd0de0ef5c1d613cc57a5fe80efd5d594991d41 100644 --- "a/crazy_functions/\350\247\243\346\236\220\351\241\271\347\233\256\346\272\220\344\273\243\347\240\201.py" +++ "b/crazy_functions/\350\247\243\346\236\220\351\241\271\347\233\256\346\272\220\344\273\243\347\240\201.py" @@ -83,7 +83,8 @@ def 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, history=this_iteration_history_feed, # 迭代之前的分析 sys_prompt="你是一个程序架构分析师,正在分析一个项目的源代码。" + sys_prompt_additional) - summary = "请用一句话概括这些文件的整体功能" + diagram_code = make_diagram(this_iteration_files, result, this_iteration_history_feed) + summary = "请用一句话概括这些文件的整体功能。\n\n" + diagram_code summary_result = yield from request_gpt_model_in_new_thread_with_ui_alive( inputs=summary, inputs_show_user=summary, @@ -104,9 +105,12 @@ def 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot.append(("完成了吗?", res)) yield from update_ui(chatbot=chatbot, history=history_to_return) # 刷新界面 +def make_diagram(this_iteration_files, result, this_iteration_history_feed): + from crazy_functions.diagram_fns.file_tree import 
build_file_tree_mermaid_diagram + return build_file_tree_mermaid_diagram(this_iteration_history_feed[0::2], this_iteration_history_feed[1::2], "项目示意图") @CatchException -def 解析项目本身(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析项目本身(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob file_manifest = [f for f in glob.glob('./*.py')] + \ @@ -119,7 +123,7 @@ def 解析项目本身(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_ yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个Python项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个Python项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -137,7 +141,7 @@ def 解析一个Python项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, s yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个Matlab项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个Matlab项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -155,7 +159,7 @@ def 解析一个Matlab项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, s yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个C项目的头文件(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个C项目的头文件(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -175,7 +179,7 @@ def 解析一个C项目的头文件(txt, llm_kwargs, plugin_kwargs, chatbot, his yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个C项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个C项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -197,7 +201,7 @@ def 解析一个C项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system @CatchException -def 解析一个Java项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个Java项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -219,7 +223,7 @@ def 解析一个Java项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, sys @CatchException -def 解析一个前端项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个前端项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -248,7 +252,7 @@ def 解析一个前端项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, s @CatchException -def 解析一个Golang项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个Golang项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -269,7 +273,7 @@ def 解析一个Golang项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, s yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, 
chatbot, history, system_prompt) @CatchException -def 解析一个Rust项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个Rust项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -289,7 +293,7 @@ def 解析一个Rust项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, sys yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个Lua项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个Lua项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -311,7 +315,7 @@ def 解析一个Lua项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, syst @CatchException -def 解析一个CSharp项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析一个CSharp项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -331,7 +335,7 @@ def 解析一个CSharp项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, s @CatchException -def 解析任意code项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 解析任意code项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): txt_pattern = plugin_kwargs.get("advanced_arg") txt_pattern = txt_pattern.replace(",", ",") # 将要匹配的模式(例如: *.c, *.cpp, *.py, config.toml) diff --git "a/crazy_functions/\350\257\242\351\227\256\345\244\232\344\270\252\345\244\247\350\257\255\350\250\200\346\250\241\345\236\213.py" "b/crazy_functions/\350\257\242\351\227\256\345\244\232\344\270\252\345\244\247\350\257\255\350\250\200\346\250\241\345\236\213.py" index 4210fb2136751da6b6d20a7381c4cd60eb8e8649..069d4407fb575216a5b640da9467cba616f3963f 100644 --- "a/crazy_functions/\350\257\242\351\227\256\345\244\232\344\270\252\345\244\247\350\257\255\350\250\200\346\250\241\345\236\213.py" +++ "b/crazy_functions/\350\257\242\351\227\256\345\244\232\344\270\252\345\244\247\350\257\255\350\250\200\346\250\241\345\236\213.py" @@ -2,7 +2,7 @@ from toolbox import CatchException, update_ui, get_conf from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive import datetime @CatchException -def 同时问询(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 同时问询(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -10,7 +10,7 @@ def 同时问询(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 MULTI_QUERY_LLM_MODELS = get_conf('MULTI_QUERY_LLM_MODELS') @@ -32,7 +32,7 @@ def 同时问询(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt @CatchException -def 同时问询_指定模型(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 同时问询_指定模型(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -40,7 +40,7 @@ def 同时问询_指定模型(txt, llm_kwargs, plugin_kwargs, chatbot, history, chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 
清空历史,以免输入溢出 diff --git "a/crazy_functions/\350\257\255\351\237\263\345\212\251\346\211\213.py" "b/crazy_functions/\350\257\255\351\237\263\345\212\251\346\211\213.py" index 3e93ceaed536e3cf1fe5129a0c5c379047d4ff3b..8af0fd999966bb44a79c68525a3192422233a3e5 100644 --- "a/crazy_functions/\350\257\255\351\237\263\345\212\251\346\211\213.py" +++ "b/crazy_functions/\350\257\255\351\237\263\345\212\251\346\211\213.py" @@ -166,7 +166,7 @@ class InterviewAssistant(AliyunASR): @CatchException -def 语音助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 语音助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): # pip install -U openai-whisper chatbot.append(["对话助手函数插件:使用时,双手离开鼠标键盘吧", "音频助手, 正在听您讲话(点击“停止”键可终止程序)..."]) yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 diff --git "a/crazy_functions/\350\257\273\346\226\207\347\253\240\345\206\231\346\221\230\350\246\201.py" "b/crazy_functions/\350\257\273\346\226\207\347\253\240\345\206\231\346\221\230\350\246\201.py" index a43b6aa29af0157e6b8eac8ed92770679b0f044e..48222a6ddd9719cb4375972d4353286bcb43fbd4 100644 --- "a/crazy_functions/\350\257\273\346\226\207\347\253\240\345\206\231\346\221\230\350\246\201.py" +++ "b/crazy_functions/\350\257\273\346\226\207\347\253\240\345\206\231\346\221\230\350\246\201.py" @@ -44,7 +44,7 @@ def 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbo @CatchException -def 读文章写摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 读文章写摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): diff --git "a/crazy_functions/\350\260\267\346\255\214\346\243\200\347\264\242\345\260\217\345\212\251\346\211\213.py" "b/crazy_functions/\350\260\267\346\255\214\346\243\200\347\264\242\345\260\217\345\212\251\346\211\213.py" index 14b21bfcd000803d8bb06164069d5096209d135c..8b7ea3ffce15e0ca6f0017987a974a3e4d183810 100644 --- "a/crazy_functions/\350\260\267\346\255\214\346\243\200\347\264\242\345\260\217\345\212\251\346\211\213.py" +++ "b/crazy_functions/\350\260\267\346\255\214\346\243\200\347\264\242\345\260\217\345\212\251\346\211\213.py" @@ -132,7 +132,7 @@ def get_meta_information(url, chatbot, history): return profile @CatchException -def 谷歌检索小助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 谷歌检索小助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): disable_auto_promotion(chatbot=chatbot) # 基本信息:功能、贡献者 chatbot.append([ diff --git "a/crazy_functions/\350\276\205\345\212\251\345\212\237\350\203\275.py" "b/crazy_functions/\350\276\205\345\212\251\345\212\237\350\203\275.py" index c5f874026c3319b9c191c92a3de592cf0e5db1e8..10f71ed6f1b82af08ba47e975195db7142b90942 100644 --- "a/crazy_functions/\350\276\205\345\212\251\345\212\237\350\203\275.py" +++ "b/crazy_functions/\350\276\205\345\212\251\345\212\237\350\203\275.py" @@ -11,7 +11,7 @@ import os @CatchException -def 猜你想问(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 猜你想问(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): if txt: show_say = txt prompt = txt+'\n回答完问题后,再列出用户可能提出的三个问题。' @@ -32,7 +32,7 @@ def 猜你想问(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt @CatchException -def 清除缓存(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 清除缓存(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, 
user_request): chatbot.append(['清除本地缓存数据', '执行中. 删除数据']) yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 diff --git "a/crazy_functions/\351\253\230\347\272\247\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" "b/crazy_functions/\351\253\230\347\272\247\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" index 0a9ac0e7f344804f0fd796a1da1a441c3cff68f4..d22a67411a2c94a06c342d041627c9f0afc031e7 100644 --- "a/crazy_functions/\351\253\230\347\272\247\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" +++ "b/crazy_functions/\351\253\230\347\272\247\345\212\237\350\203\275\345\207\275\346\225\260\346\250\241\346\235\277.py" @@ -1,19 +1,47 @@ from toolbox import CatchException, update_ui -from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive +from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive import datetime + +高阶功能模板函数示意图 = f""" +```mermaid +flowchart TD + %% 一个特殊标记,用于在生成mermaid图表时隐藏代码块 + subgraph 函数调用["函数调用过程"] + AA["输入栏用户输入的文本(txt)"] --> BB["gpt模型参数(llm_kwargs)"] + BB --> CC["插件模型参数(plugin_kwargs)"] + CC --> DD["对话显示框的句柄(chatbot)"] + DD --> EE["对话历史(history)"] + EE --> FF["系统提示词(system_prompt)"] + FF --> GG["当前用户信息(web_port)"] + + A["开始(查询5天历史事件)"] + A --> B["获取当前月份和日期"] + B --> C["生成历史事件查询提示词"] + C --> D["调用大模型"] + D --> E["更新界面"] + E --> F["记录历史"] + F --> |"下一天"| B + end +``` +""" + @CatchException -def 高阶功能模板函数(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 高阶功能模板函数(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ + # 高阶功能模板函数示意图:https://mermaid.live/edit#pako:eNptk1tvEkEYhv8KmattQpvlvOyFCcdeeaVXuoYssBwie8gyhCIlqVoLhrbbtAWNUpEGUkyMEDW2Fmn_DDOL_8LZHdOwxrnamX3f7_3mmZk6yKhZCfAgV1KrmYKoQ9fDuKC4yChX0nld1Aou1JzjznQ5fWmejh8LYHW6vG2a47YAnlCLNSIRolnenKBXI_zRIBrcuqRT890u7jZx7zMDt-AaMbnW1--5olGiz2sQjwfoQxsZL0hxplSSU0-rop4vrzmKR6O2JxYjHmwcL2Y_HDatVMkXlf86YzHbGY9bO5j8XE7O8Nsbc3iNB3ukL2SMcH-XIQBgWoVOZzxuOxOJOyc63EPGV6ZQLENVrznViYStTiaJ2vw2M2d9bByRnOXkgCnXylCSU5quyto_IcmkbdvctELmJ-j1ASW3uB3g5xOmKqVTmqr_Na3AtuS_dtBFm8H90XJyHkDDT7S9xXWb4HGmRChx64AOL5HRpUm411rM5uh4H78Z4V7fCZzytjZz2seto9XaNPFue07clLaVZF8UNLygJ-VES8lah_n-O-5Ozc7-77NzJ0-K0yr0ZYrmHdqAk50t2RbA4qq9uNohBASw7YpSgaRkLWCCAtxAlnRZLGbJba9bPwUAC5IsCYAnn1kpJ1ZKUACC0iBSsQLVBzUlA3ioVyQ3qGhZEUrxokiehAz4nFgqk1VNVABfB1uAD_g2_AGPl-W8nMcbCvsDblADfNCz4feyobDPy3rYEMtxwYYbPFNVUoHdCPmDHBv2cP4AMfrCbiBli-Q-3afv0X6WdsIjW2-10fgDy1SAig + txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 plugin_kwargs 插件模型的参数,用于灵活调整复杂功能的各种参数 chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 - chatbot.append(("这是什么功能?", "[Local Message] 请注意,您正在调用一个[函数插件]的模板,该函数面向希望实现更多有趣功能的开发者,它可以作为创建新功能函数的模板(该函数只有20多行代码)。此外我们也提供可同步处理大量文件的多线程Demo供您参考。您若希望分享新的功能模组,请不吝PR!")) + chatbot.append(( + "您正在调用插件:历史上的今天", + "[Local Message] 请注意,您正在调用一个[函数插件]的模板,该函数面向希望实现更多有趣功能的开发者,它可以作为创建新功能函数的模板(该函数只有20多行代码)。此外我们也提供可同步处理大量文件的多线程Demo供您参考。您若希望分享新的功能模组,请不吝PR!" 
+ 高阶功能模板函数示意图)) yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 由于请求gpt需要一段时间,我们先及时地做一次界面更新 for i in range(5): currentMonth = (datetime.date.today() + datetime.timedelta(days=i)).month @@ -43,7 +71,7 @@ graph TD ``` """ @CatchException -def 测试图表渲染(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): +def 测试图表渲染(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request): """ txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 @@ -51,7 +79,7 @@ def 测试图表渲染(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_ chatbot 聊天显示框的句柄,用于显示给用户 history 聊天历史,前情提要 system_prompt 给gpt的静默提醒 - web_port 当前软件运行的端口号 + user_request 当前用户的请求信息(IP地址等) """ history = [] # 清空历史,以免输入溢出 chatbot.append(("这是什么功能?", "一个测试mermaid绘制图表的功能,您可以在输入框中输入一些关键词,然后使用mermaid+llm绘制图表。")) diff --git a/docs/GithubAction+NoLocal+AudioAssistant b/docs/GithubAction+NoLocal+AudioAssistant index 9ea1ea54636d698a452be7de98d5413551ee39af..6d6dab0a5bfce8b62ce8c292924a6303229d425d 100644 --- a/docs/GithubAction+NoLocal+AudioAssistant +++ b/docs/GithubAction+NoLocal+AudioAssistant @@ -13,7 +13,7 @@ COPY . . RUN pip3 install -r requirements.txt # 安装语音插件的额外依赖 -RUN pip3 install pyOpenSSL scipy git+https://github.com/aliyun/alibabacloud-nls-python-sdk.git +RUN pip3 install aliyun-python-sdk-core==2.13.3 pyOpenSSL webrtcvad scipy git+https://github.com/aliyun/alibabacloud-nls-python-sdk.git # 可选步骤,用于预热模块 RUN python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()' diff --git a/docs/self_analysis.md b/docs/self_analysis.md index 0b76c7bdd30db64df5663db841d7e24048dde108..e34b905d7bb93ee6af9c730aef23052323c0489e 100644 --- a/docs/self_analysis.md +++ b/docs/self_analysis.md @@ -165,7 +165,7 @@ toolbox.py是一个工具类库,其中主要包含了一些函数装饰器和 3. read_file_to_chat(chatbot, history, file_name):从传入的文件中读取内容,解析出对话历史记录并更新聊天显示框。 -4. 对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):一个主要函数,用于保存当前对话记录并提醒用户。如果用户希望加载历史记录,则调用read_file_to_chat()来更新聊天显示框。如果用户希望删除历史记录,调用删除所有本地对话历史记录()函数完成删除操作。 +4. 对话历史存档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, user_request):一个主要函数,用于保存当前对话记录并提醒用户。如果用户希望加载历史记录,则调用read_file_to_chat()来更新聊天显示框。如果用户希望删除历史记录,调用删除所有本地对话历史记录()函数完成删除操作。 ## [19/48] 请对下面的程序文件做一个概述: crazy_functions\总结word文档.py diff --git a/docs/translate_english.json b/docs/translate_english.json index 3920e1f60f8db3a8e25839470ef89816c74854bd..c7e0e66d74a992aa38d1b36bc2eb2e5934200ab9 100644 --- a/docs/translate_english.json +++ b/docs/translate_english.json @@ -1668,7 +1668,7 @@ "Markdown翻译指定语言": "TranslateMarkdownToSpecifiedLanguage", "Langchain知识库": "LangchainKnowledgeBase", "Latex英文纠错加PDF对比": "CorrectEnglishInLatexWithPDFComparison", - "Latex输出PDF结果": "OutputPDFFromLatex", + "Latex输出PDF": "OutputPDFFromLatex", "Latex翻译中文并重新编译PDF": "TranslateChineseToEnglishInLatexAndRecompilePDF", "sprint亮靛": "SprintIndigo", "寻找Latex主文件": "FindLatexMainFile", @@ -3004,5 +3004,7 @@ "1. 
上传图片": "TranslatedText", "保存状态": "TranslatedText", "GPT-Academic对话存档": "TranslatedText", - "Arxiv论文精细翻译": "TranslatedText" + "Arxiv论文精细翻译": "TranslatedText", + "from crazy_functions.AdvancedFunctionTemplate import 测试图表渲染": "from crazy_functions.AdvancedFunctionTemplate import test_chart_rendering", + "测试图表渲染": "test_chart_rendering" } diff --git a/docs/translate_japanese.json b/docs/translate_japanese.json index 142e4a69e0e245348cee359e0d5b309ca0d0cc5b..a70f5df17b850760dee76f6433e6c028c4aed092 100644 --- a/docs/translate_japanese.json +++ b/docs/translate_japanese.json @@ -1492,7 +1492,7 @@ "交互功能模板函数": "InteractiveFunctionTemplateFunction", "交互功能函数模板": "InteractiveFunctionFunctionTemplate", "Latex英文纠错加PDF对比": "LatexEnglishErrorCorrectionWithPDFComparison", - "Latex输出PDF结果": "LatexOutputPDFResult", + "Latex输出PDF": "LatexOutputPDFResult", "Latex翻译中文并重新编译PDF": "TranslateChineseAndRecompilePDF", "语音助手": "VoiceAssistant", "微调数据集生成": "FineTuneDatasetGeneration", diff --git a/docs/translate_std.json b/docs/translate_std.json index 961e595bc4a979737d7acd93a0b3788133481e77..581d83e7e0b1d3911f6ce5af0a907949b06ab65c 100644 --- a/docs/translate_std.json +++ b/docs/translate_std.json @@ -16,7 +16,7 @@ "批量Markdown翻译": "BatchTranslateMarkdown", "连接bing搜索回答问题": "ConnectBingSearchAnswerQuestion", "Langchain知识库": "LangchainKnowledgeBase", - "Latex输出PDF结果": "OutputPDFFromLatex", + "Latex输出PDF": "OutputPDFFromLatex", "把字符太少的块清除为回车": "ClearBlocksWithTooFewCharactersToNewline", "Latex精细分解与转化": "DecomposeAndConvertLatex", "解析一个C项目的头文件": "ParseCProjectHeaderFiles", @@ -97,5 +97,12 @@ "多智能体": "MultiAgent", "图片生成_DALLE2": "ImageGeneration_DALLE2", "图片生成_DALLE3": "ImageGeneration_DALLE3", - "图片修改_DALLE2": "ImageModification_DALLE2" -} + "图片修改_DALLE2": "ImageModification_DALLE2", + "生成多种Mermaid图表": "GenerateMultipleMermaidCharts", + "知识库文件注入": "InjectKnowledgeBaseFiles", + "PDF翻译中文并重新编译PDF": "TranslatePDFToChineseAndRecompilePDF", + "随机小游戏": "RandomMiniGame", + "互动小游戏": "InteractiveMiniGame", + "解析历史输入": "ParseHistoricalInput", + "高阶功能模板函数示意图": "HighOrderFunctionTemplateDiagram" +} \ No newline at end of file diff --git a/docs/translate_traditionalchinese.json b/docs/translate_traditionalchinese.json index 4edc65de6997cc78460cad4e787018c2a6755d82..3378eda74fc53180be8323cb656be37cd25d6f3d 100644 --- a/docs/translate_traditionalchinese.json +++ b/docs/translate_traditionalchinese.json @@ -1468,7 +1468,7 @@ "交互功能模板函数": "InteractiveFunctionTemplateFunctions", "交互功能函数模板": "InteractiveFunctionFunctionTemplates", "Latex英文纠错加PDF对比": "LatexEnglishCorrectionWithPDFComparison", - "Latex输出PDF结果": "OutputPDFFromLatex", + "Latex输出PDF": "OutputPDFFromLatex", "Latex翻译中文并重新编译PDF": "TranslateLatexToChineseAndRecompilePDF", "语音助手": "VoiceAssistant", "微调数据集生成": "FineTuneDatasetGeneration", diff --git a/docs/use_audio.md b/docs/use_audio.md index 337c786820d3c02bfe4cae4911cac34615d8f006..0889325c9242e78d9fbf42704b0f3d8c61b18fb4 100644 --- a/docs/use_audio.md +++ b/docs/use_audio.md @@ -3,7 +3,7 @@ ## 1. 
安装额外依赖 ``` -pip install --upgrade pyOpenSSL scipy git+https://github.com/aliyun/alibabacloud-nls-python-sdk.git +pip install --upgrade pyOpenSSL webrtcvad scipy git+https://github.com/aliyun/alibabacloud-nls-python-sdk.git ``` 如果因为特色网络问题导致上述命令无法执行: diff --git a/request_llms/bridge_all.py b/request_llms/bridge_all.py index c19691e8744249d4bef28eceb72dc07b4f2a97fe..e20570f93502c5a6dc633640c90f5968f7c70f1f 100644 --- a/request_llms/bridge_all.py +++ b/request_llms/bridge_all.py @@ -11,7 +11,7 @@ import tiktoken, copy from functools import lru_cache from concurrent.futures import ThreadPoolExecutor -from toolbox import get_conf, trimmed_format_exc +from toolbox import get_conf, trimmed_format_exc, apply_gpt_academic_string_mask from .bridge_chatgpt import predict_no_ui_long_connection as chatgpt_noui from .bridge_chatgpt import predict as chatgpt_ui @@ -31,6 +31,9 @@ from .bridge_qianfan import predict as qianfan_ui from .bridge_google_gemini import predict as genai_ui from .bridge_google_gemini import predict_no_ui_long_connection as genai_noui +from .bridge_zhipu import predict_no_ui_long_connection as zhipu_noui +from .bridge_zhipu import predict as zhipu_ui + colors = ['#FF00FF', '#00FFFF', '#FF0000', '#990099', '#009999', '#990044'] class LazyloadTiktoken(object): @@ -44,13 +47,13 @@ class LazyloadTiktoken(object): tmp = tiktoken.encoding_for_model(model) print('加载tokenizer完毕') return tmp - + def encode(self, *args, **kwargs): - encoder = self.get_encoder(self.model) + encoder = self.get_encoder(self.model) return encoder.encode(*args, **kwargs) - + def decode(self, *args, **kwargs): - encoder = self.get_encoder(self.model) + encoder = self.get_encoder(self.model) return encoder.decode(*args, **kwargs) # Endpoint 重定向 @@ -63,7 +66,7 @@ azure_endpoint = AZURE_ENDPOINT + f'openai/deployments/{AZURE_ENGINE}/chat/compl # 兼容旧版的配置 try: API_URL = get_conf("API_URL") - if API_URL != "https://api.openai.com/v1/chat/completions": + if API_URL != "https://api.openai.com/v1/chat/completions": openai_endpoint = API_URL print("警告!API_URL配置选项将被弃用,请更换为API_URL_REDIRECT配置") except: @@ -95,7 +98,7 @@ model_info = { "tokenizer": tokenizer_gpt35, "token_cnt": get_token_num_gpt35, }, - + "gpt-3.5-turbo-16k": { "fn_with_ui": chatgpt_ui, "fn_without_ui": chatgpt_noui, @@ -150,6 +153,15 @@ model_info = { "token_cnt": get_token_num_gpt4, }, + "gpt-4-turbo-preview": { + "fn_with_ui": chatgpt_ui, + "fn_without_ui": chatgpt_noui, + "endpoint": openai_endpoint, + "max_token": 128000, + "tokenizer": tokenizer_gpt4, + "token_cnt": get_token_num_gpt4, + }, + "gpt-4-1106-preview": { "fn_with_ui": chatgpt_ui, "fn_without_ui": chatgpt_noui, @@ -159,6 +171,15 @@ model_info = { "token_cnt": get_token_num_gpt4, }, + "gpt-4-0125-preview": { + "fn_with_ui": chatgpt_ui, + "fn_without_ui": chatgpt_noui, + "endpoint": openai_endpoint, + "max_token": 128000, + "tokenizer": tokenizer_gpt4, + "token_cnt": get_token_num_gpt4, + }, + "gpt-3.5-random": { "fn_with_ui": chatgpt_ui, "fn_without_ui": chatgpt_noui, @@ -167,7 +188,7 @@ model_info = { "tokenizer": tokenizer_gpt4, "token_cnt": get_token_num_gpt4, }, - + "gpt-4-vision-preview": { "fn_with_ui": chatgpt_vision_ui, "fn_without_ui": chatgpt_vision_noui, @@ -197,16 +218,25 @@ model_info = { "token_cnt": get_token_num_gpt4, }, - # api_2d (此后不需要在此处添加api2d的接口了,因为下面的代码会自动添加) - "api2d-gpt-3.5-turbo": { - "fn_with_ui": chatgpt_ui, - "fn_without_ui": chatgpt_noui, - "endpoint": api2d_endpoint, - "max_token": 4096, + # 智谱AI + "glm-4": { + "fn_with_ui": zhipu_ui, + "fn_without_ui": zhipu_noui, 
+ "endpoint": None, + "max_token": 10124 * 8, + "tokenizer": tokenizer_gpt35, + "token_cnt": get_token_num_gpt35, + }, + "glm-3-turbo": { + "fn_with_ui": zhipu_ui, + "fn_without_ui": zhipu_noui, + "endpoint": None, + "max_token": 10124 * 4, "tokenizer": tokenizer_gpt35, "token_cnt": get_token_num_gpt35, }, + # api_2d (此后不需要在此处添加api2d的接口了,因为下面的代码会自动添加) "api2d-gpt-4": { "fn_with_ui": chatgpt_ui, "fn_without_ui": chatgpt_noui, @@ -530,7 +560,7 @@ if "sparkv2" in AVAIL_LLM_MODELS: # 讯飞星火认知大模型 }) except: print(trimmed_format_exc()) -if "sparkv3" in AVAIL_LLM_MODELS: # 讯飞星火认知大模型 +if "sparkv3" in AVAIL_LLM_MODELS or "sparkv3.5" in AVAIL_LLM_MODELS: # 讯飞星火认知大模型 try: from .bridge_spark import predict_no_ui_long_connection as spark_noui from .bridge_spark import predict as spark_ui @@ -542,6 +572,14 @@ if "sparkv3" in AVAIL_LLM_MODELS: # 讯飞星火认知大模型 "max_token": 4096, "tokenizer": tokenizer_gpt35, "token_cnt": get_token_num_gpt35, + }, + "sparkv3.5": { + "fn_with_ui": spark_ui, + "fn_without_ui": spark_noui, + "endpoint": None, + "max_token": 4096, + "tokenizer": tokenizer_gpt35, + "token_cnt": get_token_num_gpt35, } }) except: @@ -562,19 +600,17 @@ if "llama2" in AVAIL_LLM_MODELS: # llama2 }) except: print(trimmed_format_exc()) -if "zhipuai" in AVAIL_LLM_MODELS: # zhipuai +if "zhipuai" in AVAIL_LLM_MODELS: # zhipuai 是glm-4的别名,向后兼容配置 try: - from .bridge_zhipu import predict_no_ui_long_connection as zhipu_noui - from .bridge_zhipu import predict as zhipu_ui model_info.update({ "zhipuai": { "fn_with_ui": zhipu_ui, "fn_without_ui": zhipu_noui, "endpoint": None, - "max_token": 4096, + "max_token": 10124 * 8, "tokenizer": tokenizer_gpt35, "token_cnt": get_token_num_gpt35, - } + }, }) except: print(trimmed_format_exc()) @@ -617,7 +653,7 @@ AZURE_CFG_ARRAY = get_conf("AZURE_CFG_ARRAY") if len(AZURE_CFG_ARRAY) > 0: for azure_model_name, azure_cfg_dict in AZURE_CFG_ARRAY.items(): # 可能会覆盖之前的配置,但这是意料之中的 - if not azure_model_name.startswith('azure'): + if not azure_model_name.startswith('azure'): raise ValueError("AZURE_CFG_ARRAY中配置的模型必须以azure开头") endpoint_ = azure_cfg_dict["AZURE_ENDPOINT"] + \ f'openai/deployments/{azure_cfg_dict["AZURE_ENGINE"]}/chat/completions?api-version=2023-05-15' @@ -668,6 +704,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser """ import threading, time, copy + inputs = apply_gpt_academic_string_mask(inputs, mode="show_llm") model = llm_kwargs['llm_model'] n_model = 1 if '&' not in model: @@ -682,7 +719,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser executor = ThreadPoolExecutor(max_workers=4) models = model.split('&') n_model = len(models) - + window_len = len(observe_window) assert window_len==3 window_mutex = [["", time.time(), ""] for _ in range(n_model)] + [True] @@ -701,7 +738,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser time.sleep(0.25) if not window_mutex[-1]: break # 看门狗(watchdog) - for i in range(n_model): + for i in range(n_model): window_mutex[i][1] = observe_window[1] # 观察窗(window) chat_string = [] @@ -741,6 +778,7 @@ def predict(inputs, llm_kwargs, *args, **kwargs): additional_fn代表点击的哪个按钮,按钮见functional.py """ + inputs = apply_gpt_academic_string_mask(inputs, mode="show_llm") method = model_info[llm_kwargs['llm_model']]["fn_with_ui"] # 如果这里报错,检查config中的AVAIL_LLM_MODELS选项 yield from method(inputs, llm_kwargs, *args, **kwargs) diff --git a/request_llms/bridge_chatgpt.py b/request_llms/bridge_chatgpt.py index 
660d5ddf1abb0aea10c036a09d42cbc1ac332f04..ecb8423b4621dfe4ccedf3c679e8b007389112ab 100644 --- a/request_llms/bridge_chatgpt.py +++ b/request_llms/bridge_chatgpt.py @@ -113,6 +113,8 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="", error_msg = get_full_error(chunk, stream_response).decode() if "reduce the length" in error_msg: raise ConnectionAbortedError("OpenAI拒绝了请求:" + error_msg) + elif """type":"upstream_error","param":"307""" in error_msg: + raise ConnectionAbortedError("正常结束,但显示Token不足,导致输出不完整,请削减单次输入的文本量。") else: raise RuntimeError("OpenAI拒绝了请求:" + error_msg) if ('data: [DONE]' in chunk_decoded): break # api2d 正常完成 diff --git a/request_llms/bridge_google_gemini.py b/request_llms/bridge_google_gemini.py index 48e54190b0488c525eee1466be8deb03440d0081..cb85ecb6d342481d2036695dfab8b2a63cd8c70a 100644 --- a/request_llms/bridge_google_gemini.py +++ b/request_llms/bridge_google_gemini.py @@ -57,6 +57,10 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if "vision" in llm_kwargs["llm_model"]: have_recent_file, image_paths = have_any_recent_upload_image_files(chatbot) + if not have_recent_file: + chatbot.append((inputs, "没有检测到任何近期上传的图像文件,请上传jpg格式的图片,此外,请注意拓展名需要小写")) + yield from update_ui(chatbot=chatbot, history=history, msg="等待图片") # 刷新界面 + return def make_media_input(inputs, image_paths): for image_path in image_paths: inputs = inputs + f'

' diff --git a/request_llms/bridge_qianfan.py b/request_llms/bridge_qianfan.py index a806e0d4e58f03288a2bb7bb730c98f37e8a24da..0f02457b23847e32efd10726a65d719d904e6aa5 100644 --- a/request_llms/bridge_qianfan.py +++ b/request_llms/bridge_qianfan.py @@ -146,21 +146,17 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp yield from update_ui(chatbot=chatbot, history=history) # 开始接收回复 try: + response = f"[Local Message] 等待{model_name}响应中 ..." for response in generate_from_baidu_qianfan(inputs, llm_kwargs, history, system_prompt): chatbot[-1] = (inputs, response) yield from update_ui(chatbot=chatbot, history=history) + history.extend([inputs, response]) + yield from update_ui(chatbot=chatbot, history=history) except ConnectionAbortedError as e: from .bridge_all import model_info if len(history) >= 2: history[-1] = ""; history[-2] = "" # 清除当前溢出的输入:history[-2] 是本次输入, history[-1] 是本次输出 - history = clip_history(inputs=inputs, history=history, tokenizer=model_info[llm_kwargs['llm_model']]['tokenizer'], + history = clip_history(inputs=inputs, history=history, tokenizer=model_info[llm_kwargs['llm_model']]['tokenizer'], max_token_limit=(model_info[llm_kwargs['llm_model']]['max_token'])) # history至少释放二分之一 chatbot[-1] = (chatbot[-1][0], "[Local Message] Reduce the length. 本次输入过长, 或历史数据过长. 历史缓存数据已部分释放, 您可以请再次尝试. (若再次失败则更可能是因为输入过长.)") yield from update_ui(chatbot=chatbot, history=history, msg="异常") # 刷新界面 return - - # 总结输出 - response = f"[Local Message] {model_name}响应异常 ..." - if response == f"[Local Message] 等待{model_name}响应中 ...": - response = f"[Local Message] {model_name}响应异常 ..." - history.extend([inputs, response]) - yield from update_ui(chatbot=chatbot, history=history) \ No newline at end of file diff --git a/request_llms/bridge_qwen.py b/request_llms/bridge_qwen.py index 18877b94e5eaf5bb47b2cb4b6b0b4e5f9a0ec19c..808c2c75c42b03c108374324ea9b775ab19c378b 100644 --- a/request_llms/bridge_qwen.py +++ b/request_llms/bridge_qwen.py @@ -51,6 +51,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp # 开始接收回复 from .com_qwenapi import QwenRequestInstance sri = QwenRequestInstance() + response = f"[Local Message] 等待{model_name}响应中 ..." for response in sri.generate(inputs, llm_kwargs, history, system_prompt): chatbot[-1] = (inputs, response) yield from update_ui(chatbot=chatbot, history=history) diff --git a/request_llms/bridge_skylark2.py b/request_llms/bridge_skylark2.py index 8f10b839355cb95922a2499d85ff598e17c44352..1a8edcbca842ce642a3abe68813593754e307487 100644 --- a/request_llms/bridge_skylark2.py +++ b/request_llms/bridge_skylark2.py @@ -56,6 +56,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp # 开始接收回复 from .com_skylark2api import YUNQUERequestInstance sri = YUNQUERequestInstance() + response = f"[Local Message] 等待{model_name}响应中 ..." 
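The one-line additions to bridge_qianfan.py, bridge_qwen.py and bridge_skylark2.py above (and to bridge_spark.py just below) all apply the same guard: seed `response` with a local placeholder string before entering the streaming loop, so the code that later writes `response` into the history or the chat window never touches an unbound variable when the provider yields no chunks at all. A minimal stand-alone sketch of the failure mode and the fix — the names here are illustrative, not project code:

```python
# Why pre-seeding `response` matters when iterating a streaming generator.

def empty_stream():
    # Simulates a provider request that fails before the first chunk arrives.
    return
    yield  # unreachable; only makes this function a generator


def consume_without_default(model_name="some-model"):
    for response in empty_stream():
        pass
    return response  # UnboundLocalError: the loop never bound `response`


def consume_with_default(model_name="some-model"):
    response = f"[Local Message] 等待{model_name}响应中 ..."  # pre-seeded fallback
    for response in empty_stream():
        pass
    return response  # falls back to the placeholder text


if __name__ == "__main__":
    print(consume_with_default())
    try:
        consume_without_default()
    except UnboundLocalError as err:
        print("without default:", err)
```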
for response in sri.generate(inputs, llm_kwargs, history, system_prompt): chatbot[-1] = (inputs, response) yield from update_ui(chatbot=chatbot, history=history) diff --git a/request_llms/bridge_spark.py b/request_llms/bridge_spark.py index 1fe31ce9edcc24135ea185e82c241229aaed360b..8449494c13a60d736058616b4264c589dbb35430 100644 --- a/request_llms/bridge_spark.py +++ b/request_llms/bridge_spark.py @@ -9,7 +9,7 @@ model_name = '星火认知大模型' def validate_key(): XFYUN_APPID = get_conf('XFYUN_APPID') - if XFYUN_APPID == '00000000' or XFYUN_APPID == '': + if XFYUN_APPID == '00000000' or XFYUN_APPID == '': return False return True @@ -49,9 +49,10 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp from core_functional import handle_core_functionality inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) - # 开始接收回复 + # 开始接收回复 from .com_sparkapi import SparkRequestInstance sri = SparkRequestInstance() + response = f"[Local Message] 等待{model_name}响应中 ..." for response in sri.generate(inputs, llm_kwargs, history, system_prompt, use_image_api=True): chatbot[-1] = (inputs, response) yield from update_ui(chatbot=chatbot, history=history) diff --git a/request_llms/bridge_zhipu.py b/request_llms/bridge_zhipu.py index 91903ad37b379e02a6642b93109db5d01a7ea07c..ecb3b7550e499c73a784acd3f966fb7f635bb1ed 100644 --- a/request_llms/bridge_zhipu.py +++ b/request_llms/bridge_zhipu.py @@ -1,15 +1,21 @@ - import time +import os from toolbox import update_ui, get_conf, update_ui_lastest_msg -from toolbox import check_packages, report_exception +from toolbox import check_packages, report_exception, have_any_recent_upload_image_files model_name = '智谱AI大模型' +zhipuai_default_model = 'glm-4' def validate_key(): ZHIPUAI_API_KEY = get_conf("ZHIPUAI_API_KEY") if ZHIPUAI_API_KEY == '': return False return True +def make_media_input(inputs, image_paths): + for image_path in image_paths: + inputs = inputs + f'

' + return inputs + def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="", observe_window=[], console_slience=False): """ ⭐多线程方法 @@ -18,34 +24,40 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="", watch_dog_patience = 5 response = "" + if llm_kwargs["llm_model"] == "zhipuai": + llm_kwargs["llm_model"] = zhipuai_default_model + if validate_key() is False: raise RuntimeError('请配置ZHIPUAI_API_KEY') - from .com_zhipuapi import ZhipuRequestInstance - sri = ZhipuRequestInstance() - for response in sri.generate(inputs, llm_kwargs, history, sys_prompt): + # 开始接收回复 + from .com_zhipuglm import ZhipuChatInit + zhipu_bro_init = ZhipuChatInit() + for chunk, response in zhipu_bro_init.generate_chat(inputs, llm_kwargs, history, sys_prompt): if len(observe_window) >= 1: observe_window[0] = response if len(observe_window) >= 2: - if (time.time()-observe_window[1]) > watch_dog_patience: raise RuntimeError("程序终止。") + if (time.time() - observe_window[1]) > watch_dog_patience: + raise RuntimeError("程序终止。") return response -def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt='', stream = True, additional_fn=None): + +def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt='', stream=True, additional_fn=None): """ ⭐单线程方法 函数的说明请见 request_llms/bridge_all.py """ - chatbot.append((inputs, "")) + chatbot.append([inputs, ""]) yield from update_ui(chatbot=chatbot, history=history) # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: check_packages(["zhipuai"]) except: - yield from update_ui_lastest_msg(f"导入软件依赖失败。使用该模型需要额外依赖,安装方法```pip install zhipuai==1.0.7```。", - chatbot=chatbot, history=history, delay=0) + yield from update_ui_lastest_msg(f"导入软件依赖失败。使用该模型需要额外依赖,安装方法```pip install --upgrade zhipuai```。", + chatbot=chatbot, history=history, delay=0) return - + if validate_key() is False: yield from update_ui_lastest_msg(lastmsg="[Local Message] 请配置ZHIPUAI_API_KEY", chatbot=chatbot, history=history, delay=0) return @@ -53,16 +65,29 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp if additional_fn is not None: from core_functional import handle_core_functionality inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot) - - # 开始接收回复 - from .com_zhipuapi import ZhipuRequestInstance - sri = ZhipuRequestInstance() - for response in sri.generate(inputs, llm_kwargs, history, system_prompt): - chatbot[-1] = (inputs, response) + chatbot[-1] = [inputs, ""] yield from update_ui(chatbot=chatbot, history=history) - # 总结输出 - if response == f"[Local Message] 等待{model_name}响应中 ...": - response = f"[Local Message] {model_name}响应异常 ..." 
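Both zhipu entry points now rewrite the legacy model name `zhipuai` to the `glm-4` default before dispatching (bridge_all.py keeps a matching `zhipuai` entry purely for backward compatibility), and they consume the new `ZhipuChatInit.generate_chat` generator from request_llms/com_zhipuglm.py, which yields a `(delta, accumulated)` pair per chunk. A minimal stand-in for that streaming contract — not project code, just the shape the callers rely on:

```python
from typing import Iterator, Tuple


def fake_generate_chat(chunks) -> Iterator[Tuple[str, str]]:
    """Yields (delta, accumulated) pairs, mirroring ZhipuChatInit.generate_chat."""
    accumulated = ""
    for delta in chunks:  # stands in for the SDK's streamed chunks
        accumulated += delta
        yield delta, accumulated


if __name__ == "__main__":
    final = ""
    for chunk, response in fake_generate_chat(["你好", ",", "世界"]):
        final = response  # mirrors `chatbot[-1] = [inputs, response]` in predict()
    print(final)  # -> 你好,世界
```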
+ if llm_kwargs["llm_model"] == "zhipuai": + llm_kwargs["llm_model"] = zhipuai_default_model + + if llm_kwargs["llm_model"] in ["glm-4v"]: + have_recent_file, image_paths = have_any_recent_upload_image_files(chatbot) + if not have_recent_file: + chatbot.append((inputs, "没有检测到任何近期上传的图像文件,请上传jpg格式的图片,此外,请注意拓展名需要小写")) + yield from update_ui(chatbot=chatbot, history=history, msg="等待图片") # 刷新界面 + return + if have_recent_file: + inputs = make_media_input(inputs, image_paths) + chatbot[-1] = [inputs, ""] + yield from update_ui(chatbot=chatbot, history=history) + + + # 开始接收回复 + from .com_zhipuglm import ZhipuChatInit + zhipu_bro_init = ZhipuChatInit() + for chunk, response in zhipu_bro_init.generate_chat(inputs, llm_kwargs, history, system_prompt): + chatbot[-1] = [inputs, response] + yield from update_ui(chatbot=chatbot, history=history) history.extend([inputs, response]) yield from update_ui(chatbot=chatbot, history=history) \ No newline at end of file diff --git a/request_llms/com_google.py b/request_llms/com_google.py index 211581af5aea81aee1b5d74e28f80e7e371bedda..e66d659af5a0fb99ff803162f4fd0b6e5505ee29 100644 --- a/request_llms/com_google.py +++ b/request_llms/com_google.py @@ -7,7 +7,7 @@ import os import re import requests from typing import List, Dict, Tuple -from toolbox import get_conf, encode_image, get_pictures_list +from toolbox import get_conf, encode_image, get_pictures_list, to_markdown_tabs proxies, TIMEOUT_SECONDS = get_conf("proxies", "TIMEOUT_SECONDS") @@ -112,34 +112,6 @@ def html_local_img(__file, layout="left", max_width=None, max_height=None, md=Tr return a -def to_markdown_tabs(head: list, tabs: list, alignment=":---:", column=False): - """ - Args: - head: 表头:[] - tabs: 表值:[[列1], [列2], [列3], [列4]] - alignment: :--- 左对齐, :---: 居中对齐, ---: 右对齐 - column: True to keep data in columns, False to keep data in rows (default). - Returns: - A string representation of the markdown table. 
- """ - if column: - transposed_tabs = list(map(list, zip(*tabs))) - else: - transposed_tabs = tabs - # Find the maximum length among the columns - max_len = max(len(column) for column in transposed_tabs) - - tab_format = "| %s " - tabs_list = "".join([tab_format % i for i in head]) + "|\n" - tabs_list += "".join([tab_format % alignment for i in head]) + "|\n" - - for i in range(max_len): - row_data = [tab[i] if i < len(tab) else "" for tab in transposed_tabs] - row_data = file_manifest_filter_html(row_data, filter_=None) - tabs_list += "".join([tab_format % i for i in row_data]) + "|\n" - - return tabs_list - class GoogleChatInit: def __init__(self): diff --git a/request_llms/com_sparkapi.py b/request_llms/com_sparkapi.py index 3f667c18d40593d06cd746ce002f86036d1e5966..359e407ae51a945e87acab6f6363fbb4a6507a0a 100644 --- a/request_llms/com_sparkapi.py +++ b/request_llms/com_sparkapi.py @@ -65,6 +65,7 @@ class SparkRequestInstance(): self.gpt_url = "ws://spark-api.xf-yun.com/v1.1/chat" self.gpt_url_v2 = "ws://spark-api.xf-yun.com/v2.1/chat" self.gpt_url_v3 = "ws://spark-api.xf-yun.com/v3.1/chat" + self.gpt_url_v35 = "wss://spark-api.xf-yun.com/v3.5/chat" self.gpt_url_img = "wss://spark-api.cn-huabei-1.xf-yun.com/v2.1/image" self.time_to_yield_event = threading.Event() @@ -91,6 +92,8 @@ class SparkRequestInstance(): gpt_url = self.gpt_url_v2 elif llm_kwargs['llm_model'] == 'sparkv3': gpt_url = self.gpt_url_v3 + elif llm_kwargs['llm_model'] == 'sparkv3.5': + gpt_url = self.gpt_url_v35 else: gpt_url = self.gpt_url file_manifest = [] @@ -190,6 +193,7 @@ def gen_params(appid, inputs, llm_kwargs, history, system_prompt, file_manifest) "spark": "general", "sparkv2": "generalv2", "sparkv3": "generalv3", + "sparkv3.5": "generalv3.5", } domains_select = domains[llm_kwargs['llm_model']] if file_manifest: domains_select = 'image' diff --git a/request_llms/com_zhipuglm.py b/request_llms/com_zhipuglm.py new file mode 100644 index 0000000000000000000000000000000000000000..2e96d3fd87b1759b0dd52206b5dc03b7b760aa52 --- /dev/null +++ b/request_llms/com_zhipuglm.py @@ -0,0 +1,84 @@ +# encoding: utf-8 +# @Time : 2024/1/22 +# @Author : Kilig947 & binary husky +# @Descr : 兼容最新的智谱Ai +from toolbox import get_conf +from zhipuai import ZhipuAI +from toolbox import get_conf, encode_image, get_pictures_list +import logging, os + + +def input_encode_handler(inputs, llm_kwargs): + if llm_kwargs["most_recent_uploaded"].get("path"): + image_paths = get_pictures_list(llm_kwargs["most_recent_uploaded"]["path"]) + md_encode = [] + for md_path in image_paths: + type_ = os.path.splitext(md_path)[1].replace(".", "") + type_ = "jpeg" if type_ == "jpg" else type_ + md_encode.append({"data": encode_image(md_path), "type": type_}) + return inputs, md_encode + + +class ZhipuChatInit: + + def __init__(self): + ZHIPUAI_API_KEY, ZHIPUAI_MODEL = get_conf("ZHIPUAI_API_KEY", "ZHIPUAI_MODEL") + if len(ZHIPUAI_MODEL) > 0: + logging.error('ZHIPUAI_MODEL 配置项选项已经弃用,请在LLM_MODEL中配置') + self.zhipu_bro = ZhipuAI(api_key=ZHIPUAI_API_KEY) + self.model = '' + + def __conversation_user(self, user_input: str, llm_kwargs): + if self.model not in ["glm-4v"]: + return {"role": "user", "content": user_input} + else: + input_, encode_img = input_encode_handler(user_input, llm_kwargs=llm_kwargs) + what_i_have_asked = {"role": "user", "content": []} + what_i_have_asked['content'].append({"type": 'text', "text": user_input}) + if encode_img: + img_d = {"type": "image_url", + "image_url": {'url': encode_img}} + what_i_have_asked['content'].append(img_d) + return 
what_i_have_asked + + def __conversation_history(self, history, llm_kwargs): + messages = [] + conversation_cnt = len(history) // 2 + if conversation_cnt: + for index in range(0, 2 * conversation_cnt, 2): + what_i_have_asked = self.__conversation_user(history[index], llm_kwargs) + what_gpt_answer = { + "role": "assistant", + "content": history[index + 1] + } + messages.append(what_i_have_asked) + messages.append(what_gpt_answer) + return messages + + def __conversation_message_payload(self, inputs, llm_kwargs, history, system_prompt): + messages = [] + if system_prompt: + messages.append({"role": "system", "content": system_prompt}) + self.model = llm_kwargs['llm_model'] + messages.extend(self.__conversation_history(history, llm_kwargs)) # 处理 history + messages.append(self.__conversation_user(inputs, llm_kwargs)) # 处理用户对话 + response = self.zhipu_bro.chat.completions.create( + model=self.model, messages=messages, stream=True, + temperature=llm_kwargs.get('temperature', 0.95) * 0.95, # 只能传默认的 temperature 和 top_p + top_p=llm_kwargs.get('top_p', 0.7) * 0.7, + max_tokens=llm_kwargs.get('max_tokens', 1024 * 4), # 最大输出模型的一半 + ) + return response + + def generate_chat(self, inputs, llm_kwargs, history, system_prompt): + self.model = llm_kwargs['llm_model'] + response = self.__conversation_message_payload(inputs, llm_kwargs, history, system_prompt) + bro_results = '' + for chunk in response: + bro_results += chunk.choices[0].delta.content + yield chunk.choices[0].delta.content, bro_results + + +if __name__ == '__main__': + zhipu = ZhipuChatInit() + zhipu.generate_chat('你好', {'llm_model': 'glm-4'}, [], '你是WPSAi') diff --git a/request_llms/moss b/request_llms/moss new file mode 160000 index 0000000000000000000000000000000000000000..4d905bcead53739d4395b145cae2be308b1df795 --- /dev/null +++ b/request_llms/moss @@ -0,0 +1 @@ +Subproject commit 4d905bcead53739d4395b145cae2be308b1df795 diff --git a/requirements.txt b/requirements.txt index f13dcc5b857a71cdb1d1d895724f472a5c69d9d6..007c5a775de882e9e83ddceb1904d7be1b9fffac 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,10 @@ -pydantic==1.10.11 +https://public.agent-matrix.com/publish/gradio-3.32.8-py3-none-any.whl +gradio-client==0.8 pypdf2==2.12.1 -zhipuai<2 +zhipuai>=2 tiktoken>=0.3.3 requests[socks] +pydantic==2.5.2 protobuf==3.18 transformers>=4.27.1 scipdf_parser>=0.52 diff --git a/shared_utils/advanced_markdown_format.py b/shared_utils/advanced_markdown_format.py index 9eed4f09bffaa90fe28ae08ea31353508d7c8f8f..a015fd608914ed41982be743bc45ca9541d2166c 100644 --- a/shared_utils/advanced_markdown_format.py +++ b/shared_utils/advanced_markdown_format.py @@ -4,62 +4,47 @@ import os import math from textwrap import dedent from functools import lru_cache -from pymdownx.superfences import fence_div_format, fence_code_format +from pymdownx.superfences import fence_code_format from latex2mathml.converter import convert as tex2mathml from shared_utils.config_loader import get_conf as get_conf - -pj = os.path.join -default_user_name = 'default_user' +from shared_utils.text_mask import apply_gpt_academic_string_mask markdown_extension_configs = { - 'mdx_math': { - 'enable_dollar_delimiter': True, - 'use_gitlab_delimiters': False, + "mdx_math": { + "enable_dollar_delimiter": True, + "use_gitlab_delimiters": False, }, } code_highlight_configs = { "pymdownx.superfences": { - 'css_class': 'codehilite', + "css_class": "codehilite", "custom_fences": [ - { - 'name': 'mermaid', - 'class': 'mermaid', - 'format': fence_code_format - } - ] + {"name": 
"mermaid", "class": "mermaid", "format": fence_code_format} + ], }, "pymdownx.highlight": { - 'css_class': 'codehilite', - 'guess_lang': True, + "css_class": "codehilite", + "guess_lang": True, # 'auto_title': True, # 'linenums': True - } + }, } -def text_divide_paragraph(text): - """ - 将文本按照段落分隔符分割开,生成带有段落标签的HTML代码。 - """ - pre = '
' - suf = '
' - if text.startswith(pre) and text.endswith(suf): - return text - - if '```' in text: - # careful input - return text - elif '' in text: - # careful input - return text - else: - # whatever input - lines = text.split("\n") - for i, line in enumerate(lines): - lines[i] = lines[i].replace(" ", " ") - text = "
".join(lines) - return pre + text + suf - +code_highlight_configs_block_mermaid = { + "pymdownx.superfences": { + "css_class": "codehilite", + # "custom_fences": [ + # {"name": "mermaid", "class": "mermaid", "format": fence_code_format} + # ], + }, + "pymdownx.highlight": { + "css_class": "codehilite", + "guess_lang": True, + # 'auto_title': True, + # 'linenums': True + }, +} def tex2mathml_catch_exception(content, *args, **kwargs): try: @@ -71,20 +56,20 @@ def tex2mathml_catch_exception(content, *args, **kwargs): def replace_math_no_render(match): content = match.group(1) - if 'mode=display' in match.group(0): - content = content.replace('\n', '
') - return f"$${content}$$" + if "mode=display" in match.group(0): + content = content.replace("\n", "
") + return f'$${content}$$' else: - return f"${content}$" + return f'${content}$' def replace_math_render(match): content = match.group(1) - if 'mode=display' in match.group(0): - if '\\begin{aligned}' in content: - content = content.replace('\\begin{aligned}', '\\begin{array}') - content = content.replace('\\end{aligned}', '\\end{array}') - content = content.replace('&', ' ') + if "mode=display" in match.group(0): + if "\\begin{aligned}" in content: + content = content.replace("\\begin{aligned}", "\\begin{array}") + content = content.replace("\\end{aligned}", "\\end{array}") + content = content.replace("&", " ") content = tex2mathml_catch_exception(content, display="block") return content else: @@ -95,9 +80,11 @@ def markdown_bug_hunt(content): """ 解决一个mdx_math的bug(单$包裹begin命令时多余\n', '') + content = content.replace( + '\n", "") return content @@ -105,25 +92,29 @@ def is_equation(txt): """ 判定是否为公式 | 测试1 写出洛伦兹定律,使用tex格式公式 测试2 给出柯西不等式,使用latex格式 测试3 写出麦克斯韦方程组 """ - if '```' in txt and '```reference' not in txt: return False - if '$' not in txt and '\\[' not in txt: return False + if "```" in txt and "```reference" not in txt: + return False + if "$" not in txt and "\\[" not in txt: + return False mathpatterns = { - r'(?^[ \t]*(?:~{3,}|`{3,}))[ ]* # opening fence ((\{(?P[^\}\n]*)\})| # (optional {attrs} or (\.?(?P[\w#.+-]*)[ ]*)? # optional (.)lang @@ -162,16 +154,17 @@ FENCED_BLOCK_RE = re.compile( \n # newline (end of opening fence) (?P.*?)(?<=\n) # the code block (?P=fence)[ ]*$ # closing fence - '''), - re.MULTILINE | re.DOTALL | re.VERBOSE + """ + ), + re.MULTILINE | re.DOTALL | re.VERBOSE, ) def get_line_range(re_match_obj, txt): start_pos, end_pos = re_match_obj.regs[0] - num_newlines_before = txt[:start_pos+1].count('\n') + num_newlines_before = txt[: start_pos + 1].count("\n") line_start = num_newlines_before - line_end = num_newlines_before + txt[start_pos:end_pos].count('\n')+1 + line_end = num_newlines_before + txt[start_pos:end_pos].count("\n") + 1 return line_start, line_end @@ -181,14 +174,16 @@ def fix_code_segment_indent(txt): txt_tmp = txt while True: re_match_obj = FENCED_BLOCK_RE.search(txt_tmp) - if not re_match_obj: break - if len(lines) == 0: lines = txt.split("\n") - + if not re_match_obj: + break + if len(lines) == 0: + lines = txt.split("\n") + # 清空 txt_tmp 对应的位置方便下次搜索 start_pos, end_pos = re_match_obj.regs[0] - txt_tmp = txt_tmp[:start_pos] + ' '*(end_pos-start_pos) + txt_tmp[end_pos:] + txt_tmp = txt_tmp[:start_pos] + " " * (end_pos - start_pos) + txt_tmp[end_pos:] line_start, line_end = get_line_range(re_match_obj, txt) - + # 获取公共缩进 shared_indent_cnt = 1e5 for i in range(line_start, line_end): @@ -202,26 +197,26 @@ def fix_code_segment_indent(txt): num_spaces_should_be = math.ceil(shared_indent_cnt / 4) * 4 for i in range(line_start, line_end): add_n = num_spaces_should_be - shared_indent_cnt - lines[i] = ' ' * add_n + lines[i] - if not change_any: # 遇到第一个 + lines[i] = " " * add_n + lines[i] + if not change_any: # 遇到第一个 change_any = True if change_any: - return '\n'.join(lines) + return "\n".join(lines) else: return txt - - -@lru_cache(maxsize=128) # 使用 lru缓存 加快转换速度 + + +@lru_cache(maxsize=128) # 使用 lru缓存 加快转换速度 def markdown_convertion(txt): """ 将Markdown格式的文本转换为HTML格式。如果包含数学公式,则先将公式转换为HTML格式。 """ pre = '
' - suf = '
' + suf = "" if txt.startswith(pre) and txt.endswith(suf): # print('警告,输入了已经经过转化的字符串,二次转化可能出问题') - return txt # 已经被转化过,不需要再次转化 + return txt # 已经被转化过,不需要再次转化 find_equation_pattern = r'\n""" + + # 添加Live2D + if ADD_WAIFU: + for jsf in [ + "file=themes/waifu_plugin/jquery.min.js", + "file=themes/waifu_plugin/jquery-ui.min.js", + ]: + js += f"""\n""" + return js \ No newline at end of file diff --git a/themes/contrast.py b/themes/contrast.py index 9a4b56fd7f07abd9d0fca1c7925398662817e4eb..1e98837755de43f0eb99b02dd4d344064ee600da 100644 --- a/themes/contrast.py +++ b/themes/contrast.py @@ -67,22 +67,9 @@ def adjust_theme(): button_cancel_text_color_dark="white", ) - js = "" - for jsf in [ - os.path.join(theme_dir, "common.js"), - os.path.join(theme_dir, "mermaid.min.js"), - os.path.join(theme_dir, "mermaid_loader.js"), - ]: - with open(jsf, "r", encoding="utf8") as f: - js += f"" - - # 添加一个萌萌的看板娘 - if ADD_WAIFU: - js += """ - - - - """ + from themes.common import get_common_html_javascript_code + js = get_common_html_javascript_code() + if not hasattr(gr, "RawTemplateResponse"): gr.RawTemplateResponse = gr.routes.templates.TemplateResponse gradio_original_template_fn = gr.RawTemplateResponse diff --git a/themes/default.py b/themes/default.py index b8e943197033c29d6df8b45fd61fc55eda2ebb00..a65b0119f36b694c41abd06609eeeee96f958a21 100644 --- a/themes/default.py +++ b/themes/default.py @@ -67,22 +67,8 @@ def adjust_theme(): button_cancel_text_color_dark="white", ) - js = "" - for jsf in [ - os.path.join(theme_dir, "common.js"), - os.path.join(theme_dir, "mermaid.min.js"), - os.path.join(theme_dir, "mermaid_loader.js"), - ]: - with open(jsf, "r", encoding="utf8") as f: - js += f"" - - # 添加一个萌萌的看板娘 - if ADD_WAIFU: - js += """ - - - - """ + from themes.common import get_common_html_javascript_code + js = get_common_html_javascript_code() if not hasattr(gr, "RawTemplateResponse"): gr.RawTemplateResponse = gr.routes.templates.TemplateResponse gradio_original_template_fn = gr.RawTemplateResponse diff --git a/themes/gradios.py b/themes/gradios.py index 68f15df88dbd98f84251b7117316b51a50740f78..14d88a2996df118e4ac4a21717a4568df2d04226 100644 --- a/themes/gradios.py +++ b/themes/gradios.py @@ -31,23 +31,9 @@ def adjust_theme(): THEME = THEME.lstrip("huggingface-") set_theme = set_theme.from_hub(THEME.lower()) - js = "" - for jsf in [ - os.path.join(theme_dir, "common.js"), - os.path.join(theme_dir, "mermaid.min.js"), - os.path.join(theme_dir, "mermaid_loader.js"), - ]: - with open(jsf, "r", encoding="utf8") as f: - js += f"" - - - # 添加一个萌萌的看板娘 - if ADD_WAIFU: - js += """ - - - - """ + from themes.common import get_common_html_javascript_code + js = get_common_html_javascript_code() + if not hasattr(gr, "RawTemplateResponse"): gr.RawTemplateResponse = gr.routes.templates.TemplateResponse gradio_original_template_fn = gr.RawTemplateResponse diff --git a/themes/green.py b/themes/green.py index 84287417436274b83a09caeb6dc2c9c72b30badc..b16249a8e712b7387a3b5fcca332cf435bdaf1fb 100644 --- a/themes/green.py +++ b/themes/green.py @@ -76,22 +76,8 @@ def adjust_theme(): chatbot_code_background_color_dark="*neutral_950", ) - js = "" - for jsf in [ - os.path.join(theme_dir, "common.js"), - os.path.join(theme_dir, "mermaid.min.js"), - os.path.join(theme_dir, "mermaid_loader.js"), - ]: - with open(jsf, "r", encoding="utf8") as f: - js += f"" - - # 添加一个萌萌的看板娘 - if ADD_WAIFU: - js += """ - - - - """ + from themes.common import get_common_html_javascript_code + js = get_common_html_javascript_code() with 
open(os.path.join(theme_dir, "green.js"), "r", encoding="utf8") as f: js += f"" diff --git a/themes/mermaid.min.js b/themes/mermaid.min.js index 87df8091797d85c18867a1e49be8dd0860c0553a..b842822bd75fdc06508b732335864a2ffd66a418 100644 --- a/themes/mermaid.min.js +++ b/themes/mermaid.min.js @@ -1,1589 +1 @@ [the 1,589-line minified mermaid.min.js bundle removed here, and the single-line replacement bundle added in its place, are omitted as unreadable minified content]
])?(\d+):(\d+)(?::(\d+)(\.\d*)?)?$/,HCt=/^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;function xv(r,i){var o=r,l=null,f,b,d;return kz(r)?o={ms:r._milliseconds,d:r._days,M:r._months}:L3(r)||!isNaN(+r)?(o={},i?o[i]=+r:o.milliseconds=+r):(l=$Ct.exec(r))?(f=l[1]==="-"?-1:1,o={y:0,d:Fa(l[Am])*f,h:Fa(l[of])*f,m:Fa(l[kv])*f,s:Fa(l[O3])*f,ms:Fa(jie(l[D8]*1e3))*f}):(l=HCt.exec(r))?(f=l[1]==="-"?-1:1,o={y:L8(l[2],f),M:L8(l[3],f),w:L8(l[4],f),d:L8(l[5],f),h:L8(l[6],f),m:L8(l[7],f),s:L8(l[8],f)}):o==null?o={}:typeof o=="object"&&("from"in o||"to"in o)&&(d=zCt(mu(o.from),mu(o.to)),o={},o.ms=d.milliseconds,o.M=d.months),b=new yz(o),kz(r)&&Mo(r,"_locale")&&(b._locale=r._locale),kz(r)&&Mo(r,"_isValid")&&(b._isValid=r._isValid),b}xv.fn=yz.prototype,xv.invalid=SCt;function L8(r,i){var o=r&&parseFloat(r.replace(",","."));return(isNaN(o)?0:o)*i}function A9e(r,i){var o={};return o.months=i.month()-r.month()+(i.year()-r.year())*12,r.clone().add(o.months,"M").isAfter(i)&&--o.months,o.milliseconds=+i-+r.clone().add(o.months,"M"),o}function zCt(r,i){var o;return r.isValid()&&i.isValid()?(i=Hie(i,r),r.isBefore(i)?o=A9e(r,i):(o=A9e(i,r),o.milliseconds=-o.milliseconds,o.months=-o.months),o):{milliseconds:0,months:0}}function M9e(r,i){return function(o,l){var f,b;return l!==null&&!isNaN(+l)&&(t9e(i,"moment()."+i+"(period, number) is deprecated. Please use moment()."+i+"(number, period). See http://momentjs.com/guides/#/warnings/add-inverted-param/ for more info."),b=o,o=l,l=b),f=xv(o,l),D9e(this,f,r),this}}function D9e(r,i,o,l){var f=i._milliseconds,b=jie(i._days),d=jie(i._months);!r.isValid()||(l=l==null?!0:l,d&&h9e(r,uz(r,"Month")+d*o),b&&i9e(r,"Date",uz(r,"Date")+b*o),f&&r._d.setTime(r._d.valueOf()+f*o),l&&Ar.updateOffset(r,b||d))}var GCt=M9e(1,"add"),VCt=M9e(-1,"subtract");function L9e(r){return typeof r=="string"||r instanceof String}function UCt(r){return yv(r)||SL(r)||L9e(r)||L3(r)||YCt(r)||qCt(r)||r===null||r===void 0}function qCt(r){var i=M8(r)&&!vie(r),o=!1,l=["years","year","y","months","month","M","days","day","d","dates","date","D","hours","hour","h","minutes","minute","m","seconds","second","s","milliseconds","millisecond","ms"],f,b,d=l.length;for(f=0;fo.valueOf():o.valueOf()9999?oz(o,i?"YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]":"YYYYYY-MM-DD[T]HH:mm:ss.SSSZ"):Cm(Date.prototype.toISOString)?i?this.toDate().toISOString():new Date(this.valueOf()+this.utcOffset()*60*1e3).toISOString().replace("Z",oz(o,"Z")):oz(o,i?"YYYY-MM-DD[T]HH:mm:ss.SSS[Z]":"YYYY-MM-DD[T]HH:mm:ss.SSSZ")}function oSt(){if(!this.isValid())return"moment.invalid(/* "+this._i+" */)";var r="moment",i="",o,l,f,b;return this.isLocal()||(r=this.utcOffset()===0?"moment.utc":"moment.parseZone",i="Z"),o="["+r+'("]',l=0<=this.year()&&this.year()<=9999?"YYYY":"YYYYYY",f="-MM-DD[T]HH:mm:ss.SSS",b=i+'[")]',this.format(o+l+f+b)}function cSt(r){r||(r=this.isUtc()?Ar.defaultFormatUtc:Ar.defaultFormat);var i=oz(this,r);return this.localeData().postformat(i)}function uSt(r,i){return this.isValid()&&(yv(r)&&r.isValid()||mu(r).isValid())?xv({to:this,from:r}).locale(this.locale()).humanize(!i):this.localeData().invalidDate()}function lSt(r){return this.from(mu(),r)}function hSt(r,i){return this.isValid()&&(yv(r)&&r.isValid()||mu(r).isValid())?xv({from:this,to:r}).locale(this.locale()).humanize(!i):this.localeData().invalidDate()}function fSt(r){return this.to(mu(),r)}function I9e(r){var i;return r===void 
0?this._locale._abbr:(i=P3(r),i!=null&&(this._locale=i),this)}var O9e=C2("moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. Use moment().locale() to change languages.",function(r){return r===void 0?this.localeData():this.locale(r)});function N9e(){return this._locale}var Ez=1e3,AT=60*Ez,_z=60*AT,P9e=(365*400+97)*24*_z;function MT(r,i){return(r%i+i)%i}function B9e(r,i,o){return r<100&&r>=0?new Date(r+400,i,o)-P9e:new Date(r,i,o).valueOf()}function F9e(r,i,o){return r<100&&r>=0?Date.UTC(r+400,i,o)-P9e:Date.UTC(r,i,o)}function dSt(r){var i,o;if(r=S2(r),r===void 0||r==="millisecond"||!this.isValid())return this;switch(o=this._isUTC?F9e:B9e,r){case"year":i=o(this.year(),0,1);break;case"quarter":i=o(this.year(),this.month()-this.month()%3,1);break;case"month":i=o(this.year(),this.month(),1);break;case"week":i=o(this.year(),this.month(),this.date()-this.weekday());break;case"isoWeek":i=o(this.year(),this.month(),this.date()-(this.isoWeekday()-1));break;case"day":case"date":i=o(this.year(),this.month(),this.date());break;case"hour":i=this._d.valueOf(),i-=MT(i+(this._isUTC?0:this.utcOffset()*AT),_z);break;case"minute":i=this._d.valueOf(),i-=MT(i,AT);break;case"second":i=this._d.valueOf(),i-=MT(i,Ez);break}return this._d.setTime(i),Ar.updateOffset(this,!0),this}function gSt(r){var i,o;if(r=S2(r),r===void 0||r==="millisecond"||!this.isValid())return this;switch(o=this._isUTC?F9e:B9e,r){case"year":i=o(this.year()+1,0,1)-1;break;case"quarter":i=o(this.year(),this.month()-this.month()%3+3,1)-1;break;case"month":i=o(this.year(),this.month()+1,1)-1;break;case"week":i=o(this.year(),this.month(),this.date()-this.weekday()+7)-1;break;case"isoWeek":i=o(this.year(),this.month(),this.date()-(this.isoWeekday()-1)+7)-1;break;case"day":case"date":i=o(this.year(),this.month(),this.date()+1)-1;break;case"hour":i=this._d.valueOf(),i+=_z-MT(i+(this._isUTC?0:this.utcOffset()*AT),_z)-1;break;case"minute":i=this._d.valueOf(),i+=AT-MT(i,AT)-1;break;case"second":i=this._d.valueOf(),i+=Ez-MT(i,Ez)-1;break}return this._d.setTime(i),Ar.updateOffset(this,!0),this}function pSt(){return this._d.valueOf()-(this._offset||0)*6e4}function bSt(){return Math.floor(this.valueOf()/1e3)}function vSt(){return new Date(this.valueOf())}function wSt(){var r=this;return[r.year(),r.month(),r.date(),r.hour(),r.minute(),r.second(),r.millisecond()]}function mSt(){var r=this;return{years:r.year(),months:r.month(),date:r.date(),hours:r.hours(),minutes:r.minutes(),seconds:r.seconds(),milliseconds:r.milliseconds()}}function ySt(){return this.isValid()?this.toISOString():null}function kSt(){return mie(this)}function xSt(){return n6({},ba(this))}function ESt(){return ba(this).overflow}function _St(){return{input:this._i,format:this._f,locale:this._locale,isUTC:this._isUTC,strict:this._strict}}Ki("N",0,0,"eraAbbr"),Ki("NN",0,0,"eraAbbr"),Ki("NNN",0,0,"eraAbbr"),Ki("NNNN",0,0,"eraName"),Ki("NNNNN",0,0,"eraNarrow"),Ki("y",["y",1],"yo","eraYear"),Ki("y",["yy",2],0,"eraYear"),Ki("y",["yyy",3],0,"eraYear"),Ki("y",["yyyy",4],0,"eraYear"),ci("N",Gie),ci("NN",Gie),ci("NNN",Gie),ci("NNNN",PSt),ci("NNNNN",BSt),Mc(["N","NN","NNN","NNNN","NNNNN"],function(r,i,o,l){var f=o._locale.erasParse(r,l,o._strict);f?ba(o).era=f:ba(o).invalidEra=r}),ci("y",CT),ci("yy",CT),ci("yyy",CT),ci("yyyy",CT),ci("yo",FSt),Mc(["y","yy","yyy","yyyy"],id),Mc(["yo"],function(r,i,o,l){var 
f;o._locale._eraYearOrdinalRegex&&(f=r.match(o._locale._eraYearOrdinalRegex)),o._locale.eraYearOrdinalParse?i[id]=o._locale.eraYearOrdinalParse(r,f):i[id]=parseInt(r,10)});function TSt(r,i){var o,l,f,b=this._eras||P3("en")._eras;for(o=0,l=b.length;o=0)return b[l]}function SSt(r,i){var o=r.since<=r.until?1:-1;return i===void 0?Ar(r.since).year():Ar(r.since).year()+(i-r.offset)*o}function ASt(){var r,i,o,l=this.localeData().eras();for(r=0,i=l.length;rb&&(i=b),VSt.call(this,r,i,o,l,f))}function VSt(r,i,o,l,f){var b=p9e(r,i,o,l,f),d=OL(b.year,0,b.dayOfYear);return this.year(d.getUTCFullYear()),this.month(d.getUTCMonth()),this.date(d.getUTCDate()),this}Ki("Q",0,"Qo","quarter"),nd("quarter","Q"),rd("quarter",7),ci("Q",s9e),Mc("Q",function(r,i){i[I3]=(Fa(r)-1)*3});function USt(r){return r==null?Math.ceil((this.month()+1)/3):this.month((r-1)*3+this.month()%3)}Ki("D",["DD",2],"Do","date"),nd("date","D"),rd("date",9),ci("D",wu),ci("DD",wu,op),ci("Do",function(r,i){return r?i._dayOfMonthOrdinalParse||i._ordinalParse:i._dayOfMonthOrdinalParseLenient}),Mc(["D","DD"],Am),Mc("Do",function(r,i){i[Am]=Fa(r.match(wu)[0])});var j9e=TT("Date",!0);Ki("DDD",["DDDD",3],"DDDo","dayOfYear"),nd("dayOfYear","DDD"),rd("dayOfYear",4),ci("DDD",hz),ci("DDDD",a9e),Mc(["DDD","DDDD"],function(r,i,o){o._dayOfYear=Fa(r)});function qSt(r){var i=Math.round((this.clone().startOf("day")-this.clone().startOf("year"))/864e5)+1;return r==null?i:this.add(r-i,"d")}Ki("m",["mm",2],0,"minute"),nd("minute","m"),rd("minute",14),ci("m",wu),ci("mm",wu,op),Mc(["m","mm"],kv);var YSt=TT("Minutes",!1);Ki("s",["ss",2],0,"second"),nd("second","s"),rd("second",15),ci("s",wu),ci("ss",wu,op),Mc(["s","ss"],O3);var WSt=TT("Seconds",!1);Ki("S",0,0,function(){return~~(this.millisecond()/100)}),Ki(0,["SS",2],0,function(){return~~(this.millisecond()/10)}),Ki(0,["SSS",3],0,"millisecond"),Ki(0,["SSSS",4],0,function(){return this.millisecond()*10}),Ki(0,["SSSSS",5],0,function(){return this.millisecond()*100}),Ki(0,["SSSSSS",6],0,function(){return this.millisecond()*1e3}),Ki(0,["SSSSSSS",7],0,function(){return this.millisecond()*1e4}),Ki(0,["SSSSSSSS",8],0,function(){return this.millisecond()*1e5}),Ki(0,["SSSSSSSSS",9],0,function(){return this.millisecond()*1e6}),nd("millisecond","ms"),rd("millisecond",16),ci("S",hz,s9e),ci("SS",hz,op),ci("SSS",hz,a9e);var i6,$9e;for(i6="SSSS";i6.length<=9;i6+="S")ci(i6,CT);function KSt(r,i){i[D8]=Fa(("0."+r)*1e3)}for(i6="S";i6.length<=9;i6+="S")Mc(i6,KSt);$9e=TT("Milliseconds",!1),Ki("z",0,0,"zoneAbbr"),Ki("zz",0,0,"zoneName");function XSt(){return this._isUTC?"UTC":""}function QSt(){return this._isUTC?"Coordinated Universal Time":""}var xr=AL.prototype;xr.add=GCt,xr.calendar=XCt,xr.clone=QCt,xr.diff=iSt,xr.endOf=gSt,xr.format=cSt,xr.from=uSt,xr.fromNow=lSt,xr.to=hSt,xr.toNow=fSt,xr.get=J_t,xr.invalidAt=ESt,xr.isAfter=ZCt,xr.isBefore=JCt,xr.isBetween=eSt,xr.isSame=tSt,xr.isSameOrAfter=nSt,xr.isSameOrBefore=rSt,xr.isValid=kSt,xr.lang=O9e,xr.locale=I9e,xr.localeData=N9e,xr.max=kCt,xr.min=yCt,xr.parsingFlags=xSt,xr.set=eTt,xr.startOf=dSt,xr.subtract=VCt,xr.toArray=wSt,xr.toObject=mSt,xr.toDate=vSt,xr.toISOString=aSt,xr.inspect=oSt,typeof 
Symbol<"u"&&Symbol.for!=null&&(xr[Symbol.for("nodejs.util.inspect.custom")]=function(){return"Moment<"+this.format()+">"}),xr.toJSON=ySt,xr.toString=sSt,xr.unix=bSt,xr.valueOf=pSt,xr.creationData=_St,xr.eraName=ASt,xr.eraNarrow=MSt,xr.eraAbbr=DSt,xr.eraYear=LSt,xr.year=g9e,xr.isLeapYear=mTt,xr.weekYear=RSt,xr.isoWeekYear=jSt,xr.quarter=xr.quarters=USt,xr.month=f9e,xr.daysInMonth=bTt,xr.week=xr.weeks=TTt,xr.isoWeek=xr.isoWeeks=CTt,xr.weeksInYear=zSt,xr.weeksInWeekYear=GSt,xr.isoWeeksInYear=$St,xr.isoWeeksInISOWeekYear=HSt,xr.date=j9e,xr.day=xr.days=jTt,xr.weekday=$Tt,xr.isoWeekday=HTt,xr.dayOfYear=qSt,xr.hour=xr.hours=WTt,xr.minute=xr.minutes=YSt,xr.second=xr.seconds=WSt,xr.millisecond=xr.milliseconds=$9e,xr.utcOffset=DCt,xr.utc=ICt,xr.local=OCt,xr.parseZone=NCt,xr.hasAlignedHourOffset=PCt,xr.isDST=BCt,xr.isLocal=RCt,xr.isUtcOffset=jCt,xr.isUtc=S9e,xr.isUTC=S9e,xr.zoneAbbr=XSt,xr.zoneName=QSt,xr.dates=C2("dates accessor is deprecated. Use date instead.",j9e),xr.months=C2("months accessor is deprecated. Use month instead",f9e),xr.years=C2("years accessor is deprecated. Use year instead",g9e),xr.zone=C2("moment().zone is deprecated, use moment().utcOffset instead. http://momentjs.com/guides/#/warnings/zone/",LCt),xr.isDSTShifted=C2("isDSTShifted is deprecated. See http://momentjs.com/guides/#/warnings/dst-shifted/ for more information",FCt);function ZSt(r){return mu(r*1e3)}function JSt(){return mu.apply(null,arguments).parseZone()}function H9e(r){return r}var Do=Eie.prototype;Do.calendar=j_t,Do.longDateFormat=G_t,Do.invalidDate=U_t,Do.ordinal=W_t,Do.preparse=H9e,Do.postformat=H9e,Do.relativeTime=X_t,Do.pastFuture=Q_t,Do.set=F_t,Do.eras=TSt,Do.erasParse=CSt,Do.erasConvertYear=SSt,Do.erasAbbrRegex=OSt,Do.erasNameRegex=ISt,Do.erasNarrowRegex=NSt,Do.months=fTt,Do.monthsShort=dTt,Do.monthsParse=pTt,Do.monthsRegex=wTt,Do.monthsShortRegex=vTt,Do.week=kTt,Do.firstDayOfYear=_Tt,Do.firstDayOfWeek=ETt,Do.weekdays=NTt,Do.weekdaysMin=BTt,Do.weekdaysShort=PTt,Do.weekdaysParse=RTt,Do.weekdaysRegex=zTt,Do.weekdaysShortRegex=GTt,Do.weekdaysMinRegex=VTt,Do.isPM=qTt,Do.meridiem=KTt;function Cz(r,i,o,l){var f=P3(),b=Tm().set(l,i);return f[o](b,r)}function z9e(r,i,o){if(L3(r)&&(i=r,r=void 0),r=r||"",i!=null)return Cz(r,i,o,"month");var l,f=[];for(l=0;l<12;l++)f[l]=Cz(r,l,o,"month");return f}function Uie(r,i,o,l){typeof r=="boolean"?(L3(i)&&(o=i,i=void 0),i=i||""):(i=r,o=i,r=!1,L3(i)&&(o=i,i=void 0),i=i||"");var f=P3(),b=r?f._week.dow:0,d,w=[];if(o!=null)return Cz(i,(o+b)%7,l,"day");for(d=0;d<7;d++)w[d]=Cz(i,(d+b)%7,l,"day");return w}function eAt(r,i){return z9e(r,i,"months")}function tAt(r,i){return z9e(r,i,"monthsShort")}function nAt(r,i,o){return Uie(r,i,o,"weekdays")}function rAt(r,i,o){return Uie(r,i,o,"weekdaysShort")}function iAt(r,i,o){return Uie(r,i,o,"weekdaysMin")}r6("en",{eras:[{since:"0001-01-01",until:1/0,offset:1,name:"Anno Domini",narrow:"AD",abbr:"AD"},{since:"0000-12-31",until:-1/0,offset:1,name:"Before Christ",narrow:"BC",abbr:"BC"}],dayOfMonthOrdinalParse:/\d{1,2}(th|st|nd|rd)/,ordinal:function(r){var i=r%10,o=Fa(r%100/10)===1?"th":i===1?"st":i===2?"nd":i===3?"rd":"th";return r+o}}),Ar.lang=C2("moment.lang is deprecated. Use moment.locale instead.",r6),Ar.langData=C2("moment.langData is deprecated. 
Use moment.localeData instead.",P3);var B3=Math.abs;function sAt(){var r=this._data;return this._milliseconds=B3(this._milliseconds),this._days=B3(this._days),this._months=B3(this._months),r.milliseconds=B3(r.milliseconds),r.seconds=B3(r.seconds),r.minutes=B3(r.minutes),r.hours=B3(r.hours),r.months=B3(r.months),r.years=B3(r.years),this}function G9e(r,i,o,l){var f=xv(i,o);return r._milliseconds+=l*f._milliseconds,r._days+=l*f._days,r._months+=l*f._months,r._bubble()}function aAt(r,i){return G9e(this,r,i,1)}function oAt(r,i){return G9e(this,r,i,-1)}function V9e(r){return r<0?Math.floor(r):Math.ceil(r)}function cAt(){var r=this._milliseconds,i=this._days,o=this._months,l=this._data,f,b,d,w,y;return r>=0&&i>=0&&o>=0||r<=0&&i<=0&&o<=0||(r+=V9e(qie(o)+i)*864e5,i=0,o=0),l.milliseconds=r%1e3,f=A2(r/1e3),l.seconds=f%60,b=A2(f/60),l.minutes=b%60,d=A2(b/60),l.hours=d%24,i+=A2(d/24),y=A2(U9e(i)),o+=y,i-=V9e(qie(y)),w=A2(o/12),o%=12,l.days=i,l.months=o,l.years=w,this}function U9e(r){return r*4800/146097}function qie(r){return r*146097/4800}function uAt(r){if(!this.isValid())return NaN;var i,o,l=this._milliseconds;if(r=S2(r),r==="month"||r==="quarter"||r==="year")switch(i=this._days+l/864e5,o=this._months+U9e(i),r){case"month":return o;case"quarter":return o/3;case"year":return o/12}else switch(i=this._days+Math.round(qie(this._months)),r){case"week":return i/7+l/6048e5;case"day":return i+l/864e5;case"hour":return i*24+l/36e5;case"minute":return i*1440+l/6e4;case"second":return i*86400+l/1e3;case"millisecond":return Math.floor(i*864e5)+l;default:throw new Error("Unknown unit "+r)}}function lAt(){return this.isValid()?this._milliseconds+this._days*864e5+this._months%12*2592e6+Fa(this._months/12)*31536e6:NaN}function F3(r){return function(){return this.as(r)}}var hAt=F3("ms"),fAt=F3("s"),dAt=F3("m"),gAt=F3("h"),pAt=F3("d"),bAt=F3("w"),vAt=F3("M"),wAt=F3("Q"),mAt=F3("y");function yAt(){return xv(this)}function kAt(r){return r=S2(r),this.isValid()?this[r+"s"]():NaN}function I8(r){return function(){return this.isValid()?this._data[r]:NaN}}var xAt=I8("milliseconds"),EAt=I8("seconds"),_At=I8("minutes"),TAt=I8("hours"),CAt=I8("days"),SAt=I8("months"),AAt=I8("years");function MAt(){return A2(this.days()/7)}var R3=Math.round,DT={ss:44,s:45,m:45,h:22,d:26,w:null,M:11};function DAt(r,i,o,l,f){return f.relativeTime(i||1,!!o,r,l)}function LAt(r,i,o,l){var f=xv(r).abs(),b=R3(f.as("s")),d=R3(f.as("m")),w=R3(f.as("h")),y=R3(f.as("d")),k=R3(f.as("M")),E=R3(f.as("w")),T=R3(f.as("y")),C=b<=o.ss&&["s",b]||b0,C[4]=l,DAt.apply(null,C)}function IAt(r){return r===void 0?R3:typeof r=="function"?(R3=r,!0):!1}function OAt(r,i){return DT[r]===void 0?!1:i===void 0?DT[r]:(DT[r]=i,r==="s"&&(DT.ss=i-1),!0)}function NAt(r,i){if(!this.isValid())return this.localeData().invalidDate();var o=!1,l=DT,f,b;return typeof r=="object"&&(i=r,r=!1),typeof r=="boolean"&&(o=r),typeof i=="object"&&(l=Object.assign({},DT,i),i.s!=null&&i.ss==null&&(l.ss=i.s-1)),f=this.localeData(),b=LAt(this,!o,l,f),o&&(b=f.pastFuture(+this,b)),f.postformat(b)}var Yie=Math.abs;function LT(r){return(r>0)-(r<0)||+r}function Sz(){if(!this.isValid())return this.localeData().invalidDate();var r=Yie(this._milliseconds)/1e3,i=Yie(this._days),o=Yie(this._months),l,f,b,d,w=this.asSeconds(),y,k,E,T;return 
w?(l=A2(r/60),f=A2(l/60),r%=60,l%=60,b=A2(o/12),o%=12,d=r?r.toFixed(3).replace(/\.?0+$/,""):"",y=w<0?"-":"",k=LT(this._months)!==LT(w)?"-":"",E=LT(this._days)!==LT(w)?"-":"",T=LT(this._milliseconds)!==LT(w)?"-":"",y+"P"+(b?k+b+"Y":"")+(o?k+o+"M":"")+(i?E+i+"D":"")+(f||l||r?"T":"")+(f?T+f+"H":"")+(l?T+l+"M":"")+(r?T+d+"S":"")):"P0D"}var vo=yz.prototype;vo.isValid=CCt,vo.abs=sAt,vo.add=aAt,vo.subtract=oAt,vo.as=uAt,vo.asMilliseconds=hAt,vo.asSeconds=fAt,vo.asMinutes=dAt,vo.asHours=gAt,vo.asDays=pAt,vo.asWeeks=bAt,vo.asMonths=vAt,vo.asQuarters=wAt,vo.asYears=mAt,vo.valueOf=lAt,vo._bubble=cAt,vo.clone=yAt,vo.get=kAt,vo.milliseconds=xAt,vo.seconds=EAt,vo.minutes=_At,vo.hours=TAt,vo.days=CAt,vo.weeks=MAt,vo.months=SAt,vo.years=AAt,vo.humanize=NAt,vo.toISOString=Sz,vo.toString=Sz,vo.toJSON=Sz,vo.locale=I9e,vo.localeData=N9e,vo.toIsoString=C2("toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)",Sz),vo.lang=O9e,Ki("X",0,0,"unix"),Ki("x",0,0,"valueOf"),ci("x",dz),ci("X",nTt),Mc("X",function(r,i,o){o._d=new Date(parseFloat(r)*1e3)}),Mc("x",function(r,i,o){o._d=new Date(Fa(r))});//! moment.js -Ar.version="2.29.4",P_t(mu),Ar.fn=xr,Ar.min=xCt,Ar.max=ECt,Ar.now=_Ct,Ar.utc=Tm,Ar.unix=ZSt,Ar.months=eAt,Ar.isDate=SL,Ar.locale=r6,Ar.invalid=sz,Ar.duration=xv,Ar.isMoment=yv,Ar.weekdays=nAt,Ar.parseZone=JSt,Ar.localeData=P3,Ar.isDuration=kz,Ar.monthsShort=tAt,Ar.weekdaysMin=iAt,Ar.defineLocale=Nie,Ar.updateLocale=JTt,Ar.locales=eCt,Ar.weekdaysShort=rAt,Ar.normalizeUnits=S2,Ar.relativeTimeRounding=IAt,Ar.relativeTimeThreshold=OAt,Ar.calendarFormat=KCt,Ar.prototype=xr,Ar.HTML5_FMT={DATETIME_LOCAL:"YYYY-MM-DDTHH:mm",DATETIME_LOCAL_SECONDS:"YYYY-MM-DDTHH:mm:ss",DATETIME_LOCAL_MS:"YYYY-MM-DDTHH:mm:ss.SSS",DATE:"YYYY-MM-DD",TIME:"HH:mm",TIME_SECONDS:"HH:mm:ss",TIME_MS:"HH:mm:ss.SSS",WEEK:"GGGG-[W]WW",MONTH:"YYYY-MM"};const j3={trace:0,debug:1,info:2,warn:3,error:4,fatal:5},Se={trace:(...r)=>{},debug:(...r)=>{},info:(...r)=>{},warn:(...r)=>{},error:(...r)=>{},fatal:(...r)=>{}},Wie=function(r="fatal"){let i=j3.fatal;typeof r=="string"?(r=r.toLowerCase(),r in j3&&(i=j3[r])):typeof r=="number"&&(i=r),Se.trace=()=>{},Se.debug=()=>{},Se.info=()=>{},Se.warn=()=>{},Se.error=()=>{},Se.fatal=()=>{},i<=j3.fatal&&(Se.fatal=console.error?console.error.bind(console,M2("FATAL"),"color: orange"):console.log.bind(console,"\x1B[35m",M2("FATAL"))),i<=j3.error&&(Se.error=console.error?console.error.bind(console,M2("ERROR"),"color: orange"):console.log.bind(console,"\x1B[31m",M2("ERROR"))),i<=j3.warn&&(Se.warn=console.warn?console.warn.bind(console,M2("WARN"),"color: orange"):console.log.bind(console,"\x1B[33m",M2("WARN"))),i<=j3.info&&(Se.info=console.info?console.info.bind(console,M2("INFO"),"color: lightblue"):console.log.bind(console,"\x1B[34m",M2("INFO"))),i<=j3.debug&&(Se.debug=console.debug?console.debug.bind(console,M2("DEBUG"),"color: lightgreen"):console.log.bind(console,"\x1B[32m",M2("DEBUG"))),i<=j3.trace&&(Se.trace=console.debug?console.debug.bind(console,M2("TRACE"),"color: lightgreen"):console.log.bind(console,"\x1B[32m",M2("TRACE")))},M2=r=>`%c${Ar().format("ss.SSS")} : ${r} : `;var Mm=typeof globalThis<"u"?globalThis:typeof window<"u"?window:typeof global<"u"?global:typeof self<"u"?self:{};function q9e(r){return r&&r.__esModule&&Object.prototype.hasOwnProperty.call(r,"default")?r.default:r}var Kie={};Object.defineProperty(Kie,"__esModule",{value:!0});var $3=Kie.sanitizeUrl=void 
0,PAt=/^([^\w]*)(javascript|data|vbscript)/im,BAt=/&#(\w+)(^\w|;)?/g,FAt=/[\u0000-\u001F\u007F-\u009F\u2000-\u200D\uFEFF]/gim,RAt=/^([^:]+):/gm,jAt=[".","/"];function $At(r){return jAt.indexOf(r[0])>-1}function HAt(r){return r.replace(BAt,function(i,o){return String.fromCharCode(o)})}function zAt(r){var i=HAt(r||"").replace(FAt,"").trim();if(!i)return"about:blank";if($At(i))return i;var o=i.match(RAt);if(!o)return i;var l=o[0];return PAt.test(l)?"about:blank":i}$3=Kie.sanitizeUrl=zAt;function Az(r,i){return r==null||i==null?NaN:ri?1:r>=i?0:NaN}function GAt(r,i){return r==null||i==null?NaN:ir?1:i>=r?0:NaN}function Xie(r){let i,o,l;r.length!==2?(i=Az,o=(w,y)=>Az(r(w),y),l=(w,y)=>r(w)-y):(i=r===Az||r===GAt?r:VAt,o=r,l=r);function f(w,y,k=0,E=w.length){if(k>>1;o(w[T],y)<0?k=T+1:E=T}while(k>>1;o(w[T],y)<=0?k=T+1:E=T}while(kk&&l(w[T-1],y)>-l(w[T],y)?T-1:T}return{left:f,center:d,right:b}}function VAt(){return 0}function UAt(r){return r===null?NaN:+r}const qAt=Xie(Az).right;Xie(UAt).center;const YAt=qAt;class Y9e extends Map{constructor(i,o=XAt){if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:o}}),i!=null)for(const[l,f]of i)this.set(l,f)}get(i){return super.get(W9e(this,i))}has(i){return super.has(W9e(this,i))}set(i,o){return super.set(WAt(this,i),o)}delete(i){return super.delete(KAt(this,i))}}function W9e({_intern:r,_key:i},o){const l=i(o);return r.has(l)?r.get(l):o}function WAt({_intern:r,_key:i},o){const l=i(o);return r.has(l)?r.get(l):(r.set(l,o),o)}function KAt({_intern:r,_key:i},o){const l=i(o);return r.has(l)&&(o=r.get(l),r.delete(l)),o}function XAt(r){return r!==null&&typeof r=="object"?r.valueOf():r}var Qie=Math.sqrt(50),Zie=Math.sqrt(10),Jie=Math.sqrt(2);function QAt(r,i,o){var l,f=-1,b,d,w;if(i=+i,r=+r,o=+o,r===i&&o>0)return[r];if((l=i0){let y=Math.round(r/w),k=Math.round(i/w);for(y*wi&&--k,d=new Array(b=k-y+1);++fi&&--k,d=new Array(b=k-y+1);++f=0?(b>=Qie?10:b>=Zie?5:b>=Jie?2:1)*Math.pow(10,f):-Math.pow(10,-f)/(b>=Qie?10:b>=Zie?5:b>=Jie?2:1)}function ese(r,i,o){var l=Math.abs(i-r)/Math.max(0,o),f=Math.pow(10,Math.floor(Math.log(l)/Math.LN10)),b=l/f;return b>=Qie?f*=10:b>=Zie?f*=5:b>=Jie&&(f*=2),i=l)&&(o=l);else{let l=-1;for(let f of r)(f=i(f,++l,r))!=null&&(o=f)&&(o=f)}return o}function JAt(r,i){let o;if(i===void 0)for(const l of r)l!=null&&(o>l||o===void 0&&l>=l)&&(o=l);else{let l=-1;for(let f of r)(f=i(f,++l,r))!=null&&(o>f||o===void 0&&f>=f)&&(o=f)}return o}function eMt(r){return r}var Mz=1,tse=2,nse=3,Dz=4,X9e=1e-6;function tMt(r){return"translate("+r+",0)"}function nMt(r){return"translate(0,"+r+")"}function rMt(r){return i=>+r(i)}function iMt(r,i){return i=Math.max(0,r.bandwidth()-i*2)/2,r.round()&&(i=Math.round(i)),o=>+r(o)+i}function sMt(){return!this.__axis}function Q9e(r,i){var o=[],l=null,f=null,b=6,d=6,w=3,y=typeof window<"u"&&window.devicePixelRatio>1?0:.5,k=r===Mz||r===Dz?-1:1,E=r===Dz||r===tse?"x":"y",T=r===Mz||r===nse?tMt:nMt;function C(S){var 
L=l==null?i.ticks?i.ticks.apply(i,o):i.domain():l,O=f==null?i.tickFormat?i.tickFormat.apply(i,o):eMt:f,B=Math.max(b,0)+w,N=i.range(),F=+N[0]+y,R=+N[N.length-1]+y,q=(i.bandwidth?iMt:rMt)(i.copy(),y),X=S.selection?S.selection():S,te=X.selectAll(".domain").data([null]),H=X.selectAll(".tick").data(L,i).order(),Y=H.exit(),z=H.enter().append("g").attr("class","tick"),W=H.select("line"),Z=H.select("text");te=te.merge(te.enter().insert("path",".tick").attr("class","domain").attr("stroke","currentColor")),H=H.merge(z),W=W.merge(z.append("line").attr("stroke","currentColor").attr(E+"2",k*b)),Z=Z.merge(z.append("text").attr("fill","currentColor").attr(E,k*B).attr("dy",r===Mz?"0em":r===nse?"0.71em":"0.32em")),S!==X&&(te=te.transition(S),H=H.transition(S),W=W.transition(S),Z=Z.transition(S),Y=Y.transition(S).attr("opacity",X9e).attr("transform",function(G){return isFinite(G=q(G))?T(G+y):this.getAttribute("transform")}),z.attr("opacity",X9e).attr("transform",function(G){var ae=this.parentNode.__axis;return T((ae&&isFinite(ae=ae(G))?ae:q(G))+y)})),Y.remove(),te.attr("d",r===Dz||r===tse?d?"M"+k*d+","+F+"H"+y+"V"+R+"H"+k*d:"M"+y+","+F+"V"+R:d?"M"+F+","+k*d+"V"+y+"H"+R+"V"+k*d:"M"+F+","+y+"H"+R),H.attr("opacity",1).attr("transform",function(G){return T(q(G)+y)}),W.attr(E+"2",k*b),Z.attr(E,k*B).text(O),X.filter(sMt).attr("fill","none").attr("font-size",10).attr("font-family","sans-serif").attr("text-anchor",r===tse?"start":r===Dz?"end":"middle"),X.each(function(){this.__axis=q})}return C.scale=function(S){return arguments.length?(i=S,C):i},C.ticks=function(){return o=Array.from(arguments),C},C.tickArguments=function(S){return arguments.length?(o=S==null?[]:Array.from(S),C):o.slice()},C.tickValues=function(S){return arguments.length?(l=S==null?null:Array.from(S),C):l&&l.slice()},C.tickFormat=function(S){return arguments.length?(f=S,C):f},C.tickSize=function(S){return arguments.length?(b=d=+S,C):b},C.tickSizeInner=function(S){return arguments.length?(b=+S,C):b},C.tickSizeOuter=function(S){return arguments.length?(d=+S,C):d},C.tickPadding=function(S){return arguments.length?(w=+S,C):w},C.offset=function(S){return arguments.length?(y=+S,C):y},C}function aMt(r){return Q9e(Mz,r)}function oMt(r){return Q9e(nse,r)}var cMt={value:()=>{}};function Z9e(){for(var r=0,i=arguments.length,o={},l;r=0&&(l=o.slice(f+1),o=o.slice(0,f)),o&&!i.hasOwnProperty(o))throw new Error("unknown type: "+o);return{type:o,name:l}})}Lz.prototype=Z9e.prototype={constructor:Lz,on:function(r,i){var o=this._,l=uMt(r+"",o),f,b=-1,d=l.length;if(arguments.length<2){for(;++b0)for(var o=new Array(f),l=0,f,b;l=0&&(i=r.slice(0,o))!=="xmlns"&&(r=r.slice(o+1)),e_e.hasOwnProperty(i)?{space:e_e[i],local:r}:r}function hMt(r){return function(){var i=this.ownerDocument,o=this.namespaceURI;return o===rse&&i.documentElement.namespaceURI===rse?i.createElement(r):i.createElementNS(o,r)}}function fMt(r){return function(){return this.ownerDocument.createElementNS(r.space,r.local)}}function t_e(r){var i=Iz(r);return(i.local?fMt:hMt)(i)}function dMt(){}function ise(r){return r==null?dMt:function(){return this.querySelector(r)}}function gMt(r){typeof r!="function"&&(r=ise(r));for(var i=this._groups,o=i.length,l=new Array(o),f=0;f=R&&(R=F+1);!(X=B[R])&&++R=0;)(d=l[f])&&(b&&d.compareDocumentPosition(b)^4&&b.parentNode.insertBefore(d,b),b=d);return this}function RMt(r){r||(r=jMt);function i(T,C){return T&&C?r(T.__data__,C.__data__):!T-!C}for(var o=this._groups,l=o.length,f=new Array(l),b=0;bi?1:r>=i?0:NaN}function $Mt(){var r=arguments[0];return 
arguments[0]=this,r.apply(null,arguments),this}function HMt(){return Array.from(this)}function zMt(){for(var r=this._groups,i=0,o=r.length;i1?this.each((i==null?JMt:typeof i=="function"?tDt:eDt)(r,i,o==null?"":o)):IT(this.node(),r)}function IT(r,i){return r.style.getPropertyValue(i)||o_e(r).getComputedStyle(r,null).getPropertyValue(i)}function rDt(r){return function(){delete this[r]}}function iDt(r,i){return function(){this[r]=i}}function sDt(r,i){return function(){var o=i.apply(this,arguments);o==null?delete this[r]:this[r]=o}}function aDt(r,i){return arguments.length>1?this.each((i==null?rDt:typeof i=="function"?sDt:iDt)(r,i)):this.node()[r]}function c_e(r){return r.trim().split(/^|\s+/)}function sse(r){return r.classList||new u_e(r)}function u_e(r){this._node=r,this._names=c_e(r.getAttribute("class")||"")}u_e.prototype={add:function(r){var i=this._names.indexOf(r);i<0&&(this._names.push(r),this._node.setAttribute("class",this._names.join(" ")))},remove:function(r){var i=this._names.indexOf(r);i>=0&&(this._names.splice(i,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(r){return this._names.indexOf(r)>=0}};function l_e(r,i){for(var o=sse(r),l=-1,f=i.length;++l=0&&(o=i.slice(l+1),i=i.slice(0,l)),{type:i,name:o}})}function NDt(r){return function(){var i=this.__on;if(!!i){for(var o=0,l=-1,f=i.length,b;o>8&15|i>>4&240,i>>4&15|i&240,(i&15)<<4|i&15,1):o===8?Bz(i>>24&255,i>>16&255,i>>8&255,(i&255)/255):o===4?Bz(i>>12&15|i>>8&240,i>>8&15|i>>4&240,i>>4&15|i&240,((i&15)<<4|i&15)/255):null):(i=GDt.exec(r))?new sd(i[1],i[2],i[3],1):(i=VDt.exec(r))?new sd(i[1]*255/100,i[2]*255/100,i[3]*255/100,1):(i=UDt.exec(r))?Bz(i[1],i[2],i[3],i[4]):(i=qDt.exec(r))?Bz(i[1]*255/100,i[2]*255/100,i[3]*255/100,i[4]):(i=YDt.exec(r))?y_e(i[1],i[2]/100,i[3]/100,1):(i=WDt.exec(r))?y_e(i[1],i[2]/100,i[3]/100,i[4]):d_e.hasOwnProperty(r)?b_e(d_e[r]):r==="transparent"?new sd(NaN,NaN,NaN,0):null}function b_e(r){return new sd(r>>16&255,r>>8&255,r&255,1)}function Bz(r,i,o,l){return l<=0&&(r=i=o=NaN),new sd(r,i,o,l)}function v_e(r){return r instanceof O8||(r=N8(r)),r?(r=r.rgb(),new sd(r.r,r.g,r.b,r.opacity)):new sd}function cse(r,i,o,l){return arguments.length===1?v_e(r):new sd(r,i,o,l==null?1:l)}function sd(r,i,o,l){this.r=+r,this.g=+i,this.b=+o,this.opacity=+l}jL(sd,cse,Nz(O8,{brighter(r){return r=r==null?Pz:Math.pow(Pz,r),new sd(this.r*r,this.g*r,this.b*r,this.opacity)},darker(r){return r=r==null?$L:Math.pow($L,r),new sd(this.r*r,this.g*r,this.b*r,this.opacity)},rgb(){return this},clamp(){return new sd(P8(this.r),P8(this.g),P8(this.b),Fz(this.opacity))},displayable(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:w_e,formatHex:w_e,formatHex8:QDt,formatRgb:m_e,toString:m_e}));function w_e(){return`#${B8(this.r)}${B8(this.g)}${B8(this.b)}`}function QDt(){return`#${B8(this.r)}${B8(this.g)}${B8(this.b)}${B8((isNaN(this.opacity)?1:this.opacity)*255)}`}function m_e(){const r=Fz(this.opacity);return`${r===1?"rgb(":"rgba("}${P8(this.r)}, ${P8(this.g)}, ${P8(this.b)}${r===1?")":`, ${r})`}`}function Fz(r){return isNaN(r)?1:Math.max(0,Math.min(1,r))}function P8(r){return Math.max(0,Math.min(255,Math.round(r)||0))}function B8(r){return r=P8(r),(r<16?"0":"")+r.toString(16)}function y_e(r,i,o,l){return l<=0?r=i=o=NaN:o<=0||o>=1?r=i=NaN:i<=0&&(r=NaN),new Ev(r,i,o,l)}function k_e(r){if(r instanceof Ev)return new Ev(r.h,r.s,r.l,r.opacity);if(r instanceof O8||(r=N8(r)),!r)return new Ev;if(r instanceof Ev)return r;r=r.rgb();var 
i=r.r/255,o=r.g/255,l=r.b/255,f=Math.min(i,o,l),b=Math.max(i,o,l),d=NaN,w=b-f,y=(b+f)/2;return w?(i===b?d=(o-l)/w+(o0&&y<1?0:d,new Ev(d,w,y,r.opacity)}function ZDt(r,i,o,l){return arguments.length===1?k_e(r):new Ev(r,i,o,l==null?1:l)}function Ev(r,i,o,l){this.h=+r,this.s=+i,this.l=+o,this.opacity=+l}jL(Ev,ZDt,Nz(O8,{brighter(r){return r=r==null?Pz:Math.pow(Pz,r),new Ev(this.h,this.s,this.l*r,this.opacity)},darker(r){return r=r==null?$L:Math.pow($L,r),new Ev(this.h,this.s,this.l*r,this.opacity)},rgb(){var r=this.h%360+(this.h<0)*360,i=isNaN(r)||isNaN(this.s)?0:this.s,o=this.l,l=o+(o<.5?o:1-o)*i,f=2*o-l;return new sd(use(r>=240?r-240:r+120,f,l),use(r,f,l),use(r<120?r+240:r-120,f,l),this.opacity)},clamp(){return new Ev(x_e(this.h),Rz(this.s),Rz(this.l),Fz(this.opacity))},displayable(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl(){const r=Fz(this.opacity);return`${r===1?"hsl(":"hsla("}${x_e(this.h)}, ${Rz(this.s)*100}%, ${Rz(this.l)*100}%${r===1?")":`, ${r})`}`}}));function x_e(r){return r=(r||0)%360,r<0?r+360:r}function Rz(r){return Math.max(0,Math.min(1,r||0))}function use(r,i,o){return(r<60?i+(o-i)*r/60:r<180?o:r<240?i+(o-i)*(240-r)/60:i)*255}const JDt=Math.PI/180,eLt=180/Math.PI,jz=18,E_e=.96422,__e=1,T_e=.82521,C_e=4/29,NT=6/29,S_e=3*NT*NT,tLt=NT*NT*NT;function A_e(r){if(r instanceof Lm)return new Lm(r.l,r.a,r.b,r.opacity);if(r instanceof H3)return M_e(r);r instanceof sd||(r=v_e(r));var i=dse(r.r),o=dse(r.g),l=dse(r.b),f=lse((.2225045*i+.7168786*o+.0606169*l)/__e),b,d;return i===o&&o===l?b=d=f:(b=lse((.4360747*i+.3850649*o+.1430804*l)/E_e),d=lse((.0139322*i+.0971045*o+.7141733*l)/T_e)),new Lm(116*f-16,500*(b-f),200*(f-d),r.opacity)}function nLt(r,i,o,l){return arguments.length===1?A_e(r):new Lm(r,i,o,l==null?1:l)}function Lm(r,i,o,l){this.l=+r,this.a=+i,this.b=+o,this.opacity=+l}jL(Lm,nLt,Nz(O8,{brighter(r){return new Lm(this.l+jz*(r==null?1:r),this.a,this.b,this.opacity)},darker(r){return new Lm(this.l-jz*(r==null?1:r),this.a,this.b,this.opacity)},rgb(){var r=(this.l+16)/116,i=isNaN(this.a)?r:r+this.a/500,o=isNaN(this.b)?r:r-this.b/200;return i=E_e*hse(i),r=__e*hse(r),o=T_e*hse(o),new sd(fse(3.1338561*i-1.6168667*r-.4906146*o),fse(-.9787684*i+1.9161415*r+.033454*o),fse(.0719453*i-.2289914*r+1.4052427*o),this.opacity)}}));function lse(r){return r>tLt?Math.pow(r,1/3):r/S_e+C_e}function hse(r){return r>NT?r*r*r:S_e*(r-C_e)}function fse(r){return 255*(r<=.0031308?12.92*r:1.055*Math.pow(r,1/2.4)-.055)}function dse(r){return(r/=255)<=.04045?r/12.92:Math.pow((r+.055)/1.055,2.4)}function rLt(r){if(r instanceof H3)return new H3(r.h,r.c,r.l,r.opacity);if(r instanceof Lm||(r=A_e(r)),r.a===0&&r.b===0)return new H3(NaN,0()=>r;function D_e(r,i){return function(o){return r+o*i}}function iLt(r,i,o){return r=Math.pow(r,o),i=Math.pow(i,o)-r,o=1/o,function(l){return Math.pow(r+l*i,o)}}function sLt(r,i){var o=i-r;return o?D_e(r,o>180||o<-180?o-360*Math.round(o/360):o):$z(isNaN(r)?i:r)}function aLt(r){return(r=+r)==1?zL:function(i,o){return o-i?iLt(i,o,r):$z(isNaN(i)?o:i)}}function zL(r,i){var o=i-r;return o?D_e(r,o):$z(isNaN(r)?i:r)}const Hz=function r(i){var o=aLt(i);function l(f,b){var d=o((f=cse(f)).r,(b=cse(b)).r),w=o(f.g,b.g),y=o(f.b,b.b),k=zL(f.opacity,b.opacity);return function(E){return f.r=d(E),f.g=w(E),f.b=y(E),f.opacity=k(E),f+""}}return l.gamma=r,l}(1);function oLt(r,i){i||(i=[]);var o=r?Math.min(i.length,r.length):0,l=i.slice(),f;return 
function(b){for(f=0;fo&&(b=i.slice(o,b),w[d]?w[d]+=b:w[++d]=b),(l=l[0])===(f=f[0])?w[d]?w[d]+=f:w[++d]=f:(w[++d]=null,y.push({i:d,x:_v(l,f)})),o=bse.lastIndex;return o180?E+=360:E-k>180&&(k+=360),C.push({i:T.push(f(T)+"rotate(",null,l)-2,x:_v(k,E)})):E&&T.push(f(T)+"rotate("+E+l)}function w(k,E,T,C){k!==E?C.push({i:T.push(f(T)+"skewX(",null,l)-2,x:_v(k,E)}):E&&T.push(f(T)+"skewX("+E+l)}function y(k,E,T,C,S,L){if(k!==T||E!==C){var O=S.push(f(S)+"scale(",null,",",null,")");L.push({i:O-4,x:_v(k,T)},{i:O-2,x:_v(E,C)})}else(T!==1||C!==1)&&S.push(f(S)+"scale("+T+","+C+")")}return function(k,E){var T=[],C=[];return k=r(k),E=r(E),b(k.translateX,k.translateY,E.translateX,E.translateY,T,C),d(k.rotate,E.rotate,T,C),w(k.skewX,E.skewX,T,C),y(k.scaleX,k.scaleY,E.scaleX,E.scaleY,T,C),k=E=null,function(S){for(var L=-1,O=C.length,B;++L=0&&r._call.call(void 0,i),r=r._next;--PT}function R_e(){F8=(Vz=qL.now())+Uz,PT=GL=0;try{xLt()}finally{PT=0,_Lt(),F8=0}}function ELt(){var r=qL.now(),i=r-Vz;i>P_e&&(Uz-=i,Vz=r)}function _Lt(){for(var r,i=Gz,o,l=1/0;i;)i._call?(l>i._time&&(l=i._time),r=i,i=i._next):(o=i._next,i._next=null,i=r?r._next=o:Gz=o);UL=r,yse(l)}function yse(r){if(!PT){GL&&(GL=clearTimeout(GL));var i=r-F8;i>24?(r<1/0&&(GL=setTimeout(R_e,r-qL.now()-Uz)),VL&&(VL=clearInterval(VL))):(VL||(Vz=qL.now(),VL=setInterval(ELt,P_e)),PT=1,B_e(R_e))}}function j_e(r,i,o){var l=new qz;return i=i==null?0:+i,l.restart(f=>{l.stop(),r(f+i)},i,o),l}var TLt=Z9e("start","end","cancel","interrupt"),CLt=[],$_e=0,H_e=1,kse=2,Yz=3,z_e=4,xse=5,Wz=6;function Kz(r,i,o,l,f,b){var d=r.__transition;if(!d)r.__transition={};else if(o in d)return;SLt(r,o,{name:i,index:l,group:f,on:TLt,tween:CLt,time:b.time,delay:b.delay,duration:b.duration,ease:b.ease,timer:null,state:$_e})}function Ese(r,i){var o=Tv(r,i);if(o.state>$_e)throw new Error("too late; already scheduled");return o}function Im(r,i){var o=Tv(r,i);if(o.state>Yz)throw new Error("too late; already running");return o}function Tv(r,i){var o=r.__transition;if(!o||!(o=o[i]))throw new Error("transition not found");return o}function SLt(r,i,o){var l=r.__transition,f;l[i]=o,o.timer=F_e(b,0,o.time);function b(k){o.state=H_e,o.timer.restart(d,o.delay,o.time),o.delay<=k&&d(k-o.delay)}function d(k){var E,T,C,S;if(o.state!==H_e)return y();for(E in l)if(S=l[E],S.name===o.name){if(S.state===Yz)return j_e(d);S.state===z_e?(S.state=Wz,S.timer.stop(),S.on.call("interrupt",r,r.__data__,S.index,S.group),delete l[E]):+Ekse&&l.state=0&&(i=i.slice(0,o)),!i||i==="start"})}function iIt(r,i,o){var l,f,b=rIt(i)?Ese:Im;return function(){var d=b(this,r),w=d.on;w!==l&&(f=(l=w).copy()).on(i,o),d.on=f}}function sIt(r,i){var o=this._id;return arguments.length<2?Tv(this.node(),o).on.on(r):this.each(iIt(o,r,i))}function aIt(r){return function(){var i=this.parentNode;for(var o in this.__transition)if(+o!==r)return;i&&i.removeChild(this)}}function oIt(){return this.on("end.remove",aIt(this._id))}function cIt(r){var i=this._name,o=this._id;typeof r!="function"&&(r=ise(r));for(var l=this._groups,f=l.length,b=new Array(f),d=0;dR8)if(!(Math.abs(E*w-y*k)>R8)||!f)this._+="L"+(this._x1=r)+","+(this._y1=i);else{var C=o-b,S=l-d,L=w*w+y*y,O=C*C+S*S,B=Math.sqrt(L),N=Math.sqrt(T),F=f*Math.tan((Tse-Math.acos((L+T-O)/(2*B*N)))/2),R=F/N,q=F/B;Math.abs(R-1)>R8&&(this._+="L"+(r+R*k)+","+(i+R*E)),this._+="A"+f+","+f+",0,0,"+ +(E*C>k*S)+","+(this._x1=r+q*w)+","+(this._y1=i+q*y)}},arc:function(r,i,o,l,f,b){r=+r,i=+i,o=+o,b=!!b;var d=o*Math.cos(l),w=o*Math.sin(l),y=r+d,k=i+w,E=1^b,T=b?l-f:f-l;if(o<0)throw new Error("negative radius: 
"+o);this._x1===null?this._+="M"+y+","+k:(Math.abs(this._x1-y)>R8||Math.abs(this._y1-k)>R8)&&(this._+="L"+y+","+k),o&&(T<0&&(T=T%Cse+Cse),T>OIt?this._+="A"+o+","+o+",0,1,"+E+","+(r-d)+","+(i-w)+"A"+o+","+o+",0,1,"+E+","+(this._x1=y)+","+(this._y1=k):T>R8&&(this._+="A"+o+","+o+",0,"+ +(T>=Tse)+","+E+","+(this._x1=r+o*Math.cos(f))+","+(this._y1=i+o*Math.sin(f))))},rect:function(r,i,o,l){this._+="M"+(this._x0=this._x1=+r)+","+(this._y0=this._y1=+i)+"h"+ +o+"v"+ +l+"h"+-o+"Z"},toString:function(){return this._}};function NIt(r){if(!r.ok)throw new Error(r.status+" "+r.statusText);return r.text()}function PIt(r,i){return fetch(r,i).then(NIt)}function BIt(r){return(i,o)=>PIt(i,o).then(l=>new DOMParser().parseFromString(l,r))}var FIt=BIt("image/svg+xml");function RIt(r){return Math.abs(r=Math.round(r))>=1e21?r.toLocaleString("en").replace(/,/g,""):r.toString(10)}function Xz(r,i){if((o=(r=i?r.toExponential(i-1):r.toExponential()).indexOf("e"))<0)return null;var o,l=r.slice(0,o);return[l.length>1?l[0]+l.slice(2):l,+r.slice(o+1)]}function BT(r){return r=Xz(Math.abs(r)),r?r[1]:NaN}function jIt(r,i){return function(o,l){for(var f=o.length,b=[],d=0,w=r[0],y=0;f>0&&w>0&&(y+w+1>l&&(w=Math.max(1,l-y)),b.push(o.substring(f-=w,f+w)),!((y+=w+1)>l));)w=r[d=(d+1)%r.length];return b.reverse().join(i)}}function $It(r){return function(i){return i.replace(/[0-9]/g,function(o){return r[+o]})}}var HIt=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function Qz(r){if(!(i=HIt.exec(r)))throw new Error("invalid format: "+r);var i;return new Mse({fill:i[1],align:i[2],sign:i[3],symbol:i[4],zero:i[5],width:i[6],comma:i[7],precision:i[8]&&i[8].slice(1),trim:i[9],type:i[10]})}Qz.prototype=Mse.prototype;function Mse(r){this.fill=r.fill===void 0?" ":r.fill+"",this.align=r.align===void 0?">":r.align+"",this.sign=r.sign===void 0?"-":r.sign+"",this.symbol=r.symbol===void 0?"":r.symbol+"",this.zero=!!r.zero,this.width=r.width===void 0?void 0:+r.width,this.comma=!!r.comma,this.precision=r.precision===void 0?void 0:+r.precision,this.trim=!!r.trim,this.type=r.type===void 0?"":r.type+""}Mse.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(this.width===void 0?"":Math.max(1,this.width|0))+(this.comma?",":"")+(this.precision===void 0?"":"."+Math.max(0,this.precision|0))+(this.trim?"~":"")+this.type};function zIt(r){e:for(var i=r.length,o=1,l=-1,f;o0&&(l=0);break}return l>0?r.slice(0,l)+r.slice(f+1):r}var q_e;function GIt(r,i){var o=Xz(r,i);if(!o)return r+"";var l=o[0],f=o[1],b=f-(q_e=Math.max(-8,Math.min(8,Math.floor(f/3)))*3)+1,d=l.length;return b===d?l:b>d?l+new Array(b-d+1).join("0"):b>0?l.slice(0,b)+"."+l.slice(b):"0."+new Array(1-b).join("0")+Xz(r,Math.max(0,i+b-1))[0]}function Y_e(r,i){var o=Xz(r,i);if(!o)return r+"";var l=o[0],f=o[1];return f<0?"0."+new Array(-f).join("0")+l:l.length>f+1?l.slice(0,f+1)+"."+l.slice(f+1):l+new Array(f-l.length+2).join("0")}const W_e={"%":(r,i)=>(r*100).toFixed(i),b:r=>Math.round(r).toString(2),c:r=>r+"",d:RIt,e:(r,i)=>r.toExponential(i),f:(r,i)=>r.toFixed(i),g:(r,i)=>r.toPrecision(i),o:r=>Math.round(r).toString(8),p:(r,i)=>Y_e(r*100,i),r:Y_e,s:GIt,X:r=>Math.round(r).toString(16).toUpperCase(),x:r=>Math.round(r).toString(16)};function K_e(r){return r}var X_e=Array.prototype.map,Q_e=["y","z","a","f","p","n","\xB5","m","","k","M","G","T","P","E","Z","Y"];function VIt(r){var i=r.grouping===void 0||r.thousands===void 0?K_e:jIt(X_e.call(r.grouping,Number),r.thousands+""),o=r.currency===void 
0?"":r.currency[0]+"",l=r.currency===void 0?"":r.currency[1]+"",f=r.decimal===void 0?".":r.decimal+"",b=r.numerals===void 0?K_e:$It(X_e.call(r.numerals,String)),d=r.percent===void 0?"%":r.percent+"",w=r.minus===void 0?"\u2212":r.minus+"",y=r.nan===void 0?"NaN":r.nan+"";function k(T){T=Qz(T);var C=T.fill,S=T.align,L=T.sign,O=T.symbol,B=T.zero,N=T.width,F=T.comma,R=T.precision,q=T.trim,X=T.type;X==="n"?(F=!0,X="g"):W_e[X]||(R===void 0&&(R=12),q=!0,X="g"),(B||C==="0"&&S==="=")&&(B=!0,C="0",S="=");var te=O==="$"?o:O==="#"&&/[boxX]/.test(X)?"0"+X.toLowerCase():"",H=O==="$"?l:/[%p]/.test(X)?d:"",Y=W_e[X],z=/[defgprs%]/.test(X);R=R===void 0?6:/[gprs]/.test(X)?Math.max(1,Math.min(21,R)):Math.max(0,Math.min(20,R));function W(Z){var G=te,ae=H,$,ge,ee;if(X==="c")ae=Y(Z)+ae,Z="";else{Z=+Z;var de=Z<0||1/Z<0;if(Z=isNaN(Z)?y:Y(Math.abs(Z),R),q&&(Z=zIt(Z)),de&&+Z==0&&L!=="+"&&(de=!1),G=(de?L==="("?L:w:L==="-"||L==="("?"":L)+G,ae=(X==="s"?Q_e[8+q_e/3]:"")+ae+(de&&L==="("?")":""),z){for($=-1,ge=Z.length;++$ee||ee>57){ae=(ee===46?f+Z.slice($+1):Z.slice($))+ae,Z=Z.slice(0,$);break}}}F&&!B&&(Z=i(Z,1/0));var re=G.length+Z.length+ae.length,ke=re>1)+G+Z+ae+ke.slice(re);break;default:Z=ke+G+Z+ae;break}return b(Z)}return W.toString=function(){return T+""},W}function E(T,C){var S=k((T=Qz(T),T.type="f",T)),L=Math.max(-8,Math.min(8,Math.floor(BT(C)/3)))*3,O=Math.pow(10,-L),B=Q_e[8+L/3];return function(N){return S(O*N)+B}}return{format:k,formatPrefix:E}}var Zz,Z_e,J_e;UIt({thousands:",",grouping:[3],currency:["$",""]});function UIt(r){return Zz=VIt(r),Z_e=Zz.format,J_e=Zz.formatPrefix,Zz}function qIt(r){return Math.max(0,-BT(Math.abs(r)))}function YIt(r,i){return Math.max(0,Math.max(-8,Math.min(8,Math.floor(BT(i)/3)))*3-BT(Math.abs(r)))}function WIt(r,i){return r=Math.abs(r),i=Math.abs(i)-r,Math.max(0,BT(i)-BT(r))+1}function Dse(r,i){switch(arguments.length){case 0:break;case 1:this.range(r);break;default:this.range(i).domain(r);break}return this}const eTe=Symbol("implicit");function tTe(){var r=new Y9e,i=[],o=[],l=eTe;function f(b){let d=r.get(b);if(d===void 0){if(l!==eTe)return l;r.set(b,d=i.push(b)-1)}return o[d%o.length]}return f.domain=function(b){if(!arguments.length)return i.slice();i=[],r=new Y9e;for(const d of b)r.has(d)||r.set(d,i.push(d)-1);return f},f.range=function(b){return arguments.length?(o=Array.from(b),f):o.slice()},f.unknown=function(b){return arguments.length?(l=b,f):l},f.copy=function(){return tTe(i,o).unknown(l)},Dse.apply(f,arguments),f}function KIt(r){return function(){return r}}function XIt(r){return+r}var nTe=[0,1];function FT(r){return r}function Lse(r,i){return(i-=r=+r)?function(o){return(o-r)/i}:KIt(isNaN(i)?NaN:.5)}function QIt(r,i){var o;return r>i&&(o=r,r=i,i=o),function(l){return Math.max(r,Math.min(i,l))}}function ZIt(r,i,o){var l=r[0],f=r[1],b=i[0],d=i[1];return f2?JIt:ZIt,y=k=null,T}function T(C){return C==null||isNaN(C=+C)?b:(y||(y=w(r.map(l),i,o)))(l(d(C)))}return T.invert=function(C){return d(f((k||(k=w(i,r.map(l),_v)))(C)))},T.domain=function(C){return arguments.length?(r=Array.from(C,XIt),E()):r.slice()},T.range=function(C){return arguments.length?(i=Array.from(C),E()):i.slice()},T.rangeRound=function(C){return i=Array.from(C),o=gLt,E()},T.clamp=function(C){return arguments.length?(d=C?!0:FT,E()):d!==FT},T.interpolate=function(C){return arguments.length?(o=C,E()):o},T.unknown=function(C){return arguments.length?(b=C,T):b},function(C,S){return l=C,f=S,E()}}function iTe(){return eOt()(FT,FT)}function tOt(r,i,o,l){var f=ese(r,i,o),b;switch(l=Qz(l==null?",f":l),l.type){case"s":{var 
d=Math.max(Math.abs(r),Math.abs(i));return l.precision==null&&!isNaN(b=YIt(f,d))&&(l.precision=b),J_e(l,d)}case"":case"e":case"g":case"p":case"r":{l.precision==null&&!isNaN(b=WIt(f,Math.max(Math.abs(r),Math.abs(i))))&&(l.precision=b-(l.type==="e"));break}case"f":case"%":{l.precision==null&&!isNaN(b=qIt(f))&&(l.precision=b-(l.type==="%")*2);break}}return Z_e(l)}function nOt(r){var i=r.domain;return r.ticks=function(o){var l=i();return QAt(l[0],l[l.length-1],o==null?10:o)},r.tickFormat=function(o,l){var f=i();return tOt(f[0],f[f.length-1],o==null?10:o,l)},r.nice=function(o){o==null&&(o=10);var l=i(),f=0,b=l.length-1,d=l[f],w=l[b],y,k,E=10;for(w0;){if(k=K9e(d,w,o),k===y)return l[f]=d,l[b]=w,i(l);if(k>0)d=Math.floor(d/k)*k,w=Math.ceil(w/k)*k;else if(k<0)d=Math.ceil(d*k)/k,w=Math.floor(w*k)/k;else break;y=k}return r},r}function sTe(){var r=iTe();return r.copy=function(){return rTe(r,sTe())},Dse.apply(r,arguments),nOt(r)}function rOt(r,i){r=r.slice();var o=0,l=r.length-1,f=r[o],b=r[l],d;return b0))return y;do y.push(k=new Date(+b)),i(b,w),r(b);while(k=d)for(;r(d),!b(d);)d.setTime(d-1)},function(d,w){if(d>=d)if(w<0)for(;++w<=0;)for(;i(d,-1),!b(d););else for(;--w>=0;)for(;i(d,1),!b(d););})},o&&(f.count=function(b,d){return Ise.setTime(+b),Ose.setTime(+d),r(Ise),r(Ose),Math.floor(o(Ise,Ose))},f.every=function(b){return b=Math.floor(b),!isFinite(b)||!(b>0)?null:b>1?f.filter(l?function(d){return l(d)%b===0}:function(d){return f.count(0,d)%b===0}):f}),f}var Jz=Uf(function(){},function(r,i){r.setTime(+r+i)},function(r,i){return i-r});Jz.every=function(r){return r=Math.floor(r),!isFinite(r)||!(r>0)?null:r>1?Uf(function(i){i.setTime(Math.floor(i/r)*r)},function(i,o){i.setTime(+i+o*r)},function(i,o){return(o-i)/r}):Jz};const iOt=Jz;Jz.range;const V3=1e3,D2=V3*60,U3=D2*60,j8=U3*24,Nse=j8*7,aTe=j8*30,Pse=j8*365;var oTe=Uf(function(r){r.setTime(r-r.getMilliseconds())},function(r,i){r.setTime(+r+i*V3)},function(r,i){return(i-r)/V3},function(r){return r.getUTCSeconds()});const YL=oTe;oTe.range;var cTe=Uf(function(r){r.setTime(r-r.getMilliseconds()-r.getSeconds()*V3)},function(r,i){r.setTime(+r+i*D2)},function(r,i){return(i-r)/D2},function(r){return r.getMinutes()});const eG=cTe;cTe.range;var uTe=Uf(function(r){r.setTime(r-r.getMilliseconds()-r.getSeconds()*V3-r.getMinutes()*D2)},function(r,i){r.setTime(+r+i*U3)},function(r,i){return(i-r)/U3},function(r){return r.getHours()});const tG=uTe;uTe.range;var lTe=Uf(r=>r.setHours(0,0,0,0),(r,i)=>r.setDate(r.getDate()+i),(r,i)=>(i-r-(i.getTimezoneOffset()-r.getTimezoneOffset())*D2)/j8,r=>r.getDate()-1);const RT=lTe;lTe.range;function $8(r){return Uf(function(i){i.setDate(i.getDate()-(i.getDay()+7-r)%7),i.setHours(0,0,0,0)},function(i,o){i.setDate(i.getDate()+o*7)},function(i,o){return(o-i-(o.getTimezoneOffset()-i.getTimezoneOffset())*D2)/Nse})}var jT=$8(0),nG=$8(1),sOt=$8(2),aOt=$8(3),$T=$8(4),oOt=$8(5),cOt=$8(6);jT.range,nG.range,sOt.range,aOt.range,$T.range,oOt.range,cOt.range;var hTe=Uf(function(r){r.setDate(1),r.setHours(0,0,0,0)},function(r,i){r.setMonth(r.getMonth()+i)},function(r,i){return i.getMonth()-r.getMonth()+(i.getFullYear()-r.getFullYear())*12},function(r){return r.getMonth()});const rG=hTe;hTe.range;var Bse=Uf(function(r){r.setMonth(0,1),r.setHours(0,0,0,0)},function(r,i){r.setFullYear(r.getFullYear()+i)},function(r,i){return i.getFullYear()-r.getFullYear()},function(r){return 
[Minified third-party JavaScript bundle omitted: d3 time intervals and locale-aware time formatting, d3-shape arc/pie/curve generators, the d3-zoom transform, and DOMPurify 2.4.3 | (c) Cure53 and other contributors | Released under the Apache License 2.0 and Mozilla Public License 2.0 | github.com/cure53/DOMPurify/blob/2.4.3/LICENSE]
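For orientation, the one API surface worth noting in the bundle above is DOMPurify's sanitizer, which bundles like this typically use to scrub generated HTML before inserting it into the page. The snippet below is a hedged illustration of that public API (DOMPurify.sanitize with an ALLOWED_TAGS/ALLOWED_ATTR config, both of which appear in the minified source), not code taken from this bundle; the whitelist and the "chat-output" element id are assumptions made for the example.

import DOMPurify from "dompurify";

// Example input containing a disallowed tag with an event-handler payload.
const dirty = '<img src=x onerror="alert(1)"><b>hello</b>';

// Sanitize against an explicit whitelist; anything not listed is stripped.
const clean = DOMPurify.sanitize(dirty, {
  ALLOWED_TAGS: ["b", "i", "em", "strong", "a", "code", "pre"],
  ALLOWED_ATTR: ["href", "title"],
});

// clean === "<b>hello</b>": the <img> element and its onerror handler are removed.
// "chat-output" is a hypothetical element id used only for this illustration.
document.getElementById("chat-output")!.innerHTML = clean;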