Spaces:
Runtime error
Runtime error
添加Markdown全文翻译插件,并用此插件翻译了本项目的README
Browse files- crazy_functional.py +14 -1
- crazy_functions/Latex全文翻译.py +1 -1
- crazy_functions/批量Markdown翻译.py +162 -0
- img/README_EN.md +294 -0
crazy_functional.py
CHANGED
@@ -85,6 +85,8 @@ def get_crazy_functions():
|
|
85 |
from crazy_functions.Latex全文润色 import Latex中文润色
|
86 |
from crazy_functions.Latex全文翻译 import Latex中译英
|
87 |
from crazy_functions.Latex全文翻译 import Latex英译中
|
|
|
|
|
88 |
|
89 |
function_plugins.update({
|
90 |
"批量翻译PDF文档(多线程)": {
|
@@ -142,7 +144,18 @@ def get_crazy_functions():
|
|
142 |
"AsButton": False, # 加入下拉菜单中
|
143 |
"Function": HotReload(Latex英译中)
|
144 |
},
|
145 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
146 |
|
147 |
})
|
148 |
|
|
|
85 |
from crazy_functions.Latex全文润色 import Latex中文润色
|
86 |
from crazy_functions.Latex全文翻译 import Latex中译英
|
87 |
from crazy_functions.Latex全文翻译 import Latex英译中
|
88 |
+
from crazy_functions.批量Markdown翻译 import Markdown中译英
|
89 |
+
from crazy_functions.批量Markdown翻译 import Markdown英译中
|
90 |
|
91 |
function_plugins.update({
|
92 |
"批量翻译PDF文档(多线程)": {
|
|
|
144 |
"AsButton": False, # 加入下拉菜单中
|
145 |
"Function": HotReload(Latex英译中)
|
146 |
},
|
147 |
+
"[测试功能] 批量Markdown中译英(输入路径或上传压缩包)": {
|
148 |
+
# HotReload 的意思是热更新,修改函数插件代码后,不需要重启程序,代码直接生效
|
149 |
+
"Color": "stop",
|
150 |
+
"AsButton": False, # 加入下拉菜单中
|
151 |
+
"Function": HotReload(Markdown中译英)
|
152 |
+
},
|
153 |
+
"[测试功能] 批量Markdown英译中(输入路径或上传压缩包)": {
|
154 |
+
# HotReload 的意思是热更新,修改函数插件代码后,不需要重启程序,代码直接生效
|
155 |
+
"Color": "stop",
|
156 |
+
"AsButton": False, # 加入下拉菜单中
|
157 |
+
"Function": HotReload(Markdown英译中)
|
158 |
+
},
|
159 |
|
160 |
})
|
161 |
|
crazy_functions/Latex全文翻译.py
CHANGED
@@ -80,7 +80,7 @@ def 多文件翻译(file_manifest, project_folder, llm_kwargs, plugin_kwargs, ch
|
|
80 |
elif language == 'zh->en':
|
81 |
inputs_array = [f"Below is a section from a Chinese academic paper, translate it into English, do not modify any latex command such as \section, \cite and equations:" +
|
82 |
f"\n\n{frag}" for frag in pfg.sp_file_contents]
|
83 |
-
inputs_show_user_array = [f"
|
84 |
sys_prompt_array = ["You are a professional academic paper translator." for _ in range(n_split)]
|
85 |
|
86 |
gpt_response_collection = yield from request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency(
|
|
|
80 |
elif language == 'zh->en':
|
81 |
inputs_array = [f"Below is a section from a Chinese academic paper, translate it into English, do not modify any latex command such as \section, \cite and equations:" +
|
82 |
f"\n\n{frag}" for frag in pfg.sp_file_contents]
|
83 |
+
inputs_show_user_array = [f"翻译 {f}" for f in pfg.sp_file_tag]
|
84 |
sys_prompt_array = ["You are a professional academic paper translator." for _ in range(n_split)]
|
85 |
|
86 |
gpt_response_collection = yield from request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency(
|
crazy_functions/批量Markdown翻译.py
ADDED
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from toolbox import update_ui
|
2 |
+
from toolbox import CatchException, report_execption, write_results_to_file
|
3 |
+
fast_debug = False
|
4 |
+
|
5 |
+
class PaperFileGroup():
    """Holds a batch of files plus their token-limited fragments.

    Any file whose content exceeds the token budget is broken into
    smaller segments so each piece fits into a single LLM request.
    """

    def __init__(self):
        self.file_paths = []        # original file paths
        self.file_contents = []     # raw text of each file
        self.sp_file_contents = []  # fragments, each within the token limit
        self.sp_file_index = []     # index into file_paths for every fragment
        self.sp_file_tag = []       # display tag for every fragment

        # count_token — imported lazily so tiktoken is only required when a
        # group is actually constructed
        import tiktoken
        from toolbox import get_conf
        enc = tiktoken.encoding_for_model(*get_conf('LLM_MODEL'))
        def get_token_num(txt): return len(enc.encode(txt))
        self.get_token_num = get_token_num

    def run_file_split(self, max_token_limit=1900):
        """Split every stored file so no fragment exceeds max_token_limit tokens."""
        for idx, content in enumerate(self.file_contents):
            if self.get_token_num(content) < max_token_limit:
                # Small enough — keep the file as a single fragment.
                self.sp_file_contents.append(content)
                self.sp_file_index.append(idx)
                self.sp_file_tag.append(self.file_paths[idx])
                continue
            # Too long — break it down and tag each part with its ordinal.
            from .crazy_utils import breakdown_txt_to_satisfy_token_limit_for_pdf
            segments = breakdown_txt_to_satisfy_token_limit_for_pdf(content, self.get_token_num, max_token_limit)
            for j, segment in enumerate(segments):
                self.sp_file_contents.append(segment)
                self.sp_file_index.append(idx)
                self.sp_file_tag.append(self.file_paths[idx] + f".part-{j}.md")

        print('Segmentation: done')
38 |
+
|
39 |
+
def 多文件翻译(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, language='en'):
    """Translate a batch of Markdown files with multi-threaded LLM requests.

    Args:
        file_manifest: list of markdown file paths to translate.
        project_folder: project root (kept for interface parity; unused here).
        llm_kwargs / plugin_kwargs / system_prompt: plugin framework plumbing.
        chatbot / history: UI conversation state, mutated in place.
        language: translation direction, 'en->zh' or 'zh->en'.

    Raises:
        ValueError: if language is not one of the supported directions.

    Yields UI-refresh events; the collected result is written to a report file.
    """
    import time
    from .crazy_utils import request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency

    # <-------- Read the Markdown files ---------->
    pfg = PaperFileGroup()

    for index, fp in enumerate(file_manifest):
        with open(fp, 'r', encoding='utf-8', errors='replace') as f:
            file_content = f.read()
            pfg.file_paths.append(fp)
            pfg.file_contents.append(file_content)

    # <-------- Split over-long Markdown files ---------->
    pfg.run_file_split(max_token_limit=2048)
    n_split = len(pfg.sp_file_contents)

    # <-------- Build prompts and launch the multi-threaded translation ---------->
    if language == 'en->zh':
        inputs_array = ["This is a Markdown file, translate it into Chinese, do not modify any existing Markdown commands:" +
                        f"\n\n{frag}" for frag in pfg.sp_file_contents]
        inputs_show_user_array = [f"翻译 {f}" for f in pfg.sp_file_tag]
        sys_prompt_array = ["You are a professional academic paper translator." for _ in range(n_split)]
    elif language == 'zh->en':
        inputs_array = ["This is a Markdown file, translate it into English, do not modify any existing Markdown commands:" +
                        f"\n\n{frag}" for frag in pfg.sp_file_contents]
        inputs_show_user_array = [f"翻译 {f}" for f in pfg.sp_file_tag]
        sys_prompt_array = ["You are a professional academic paper translator." for _ in range(n_split)]
    else:
        # Originally an unknown direction (including the default 'en') fell
        # through both branches and crashed with NameError on inputs_array.
        raise ValueError(f"Unsupported language option: {language} (expected 'en->zh' or 'zh->en')")

    gpt_response_collection = yield from request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency(
        inputs_array=inputs_array,
        inputs_show_user_array=inputs_show_user_array,
        llm_kwargs=llm_kwargs,
        chatbot=chatbot,
        history_array=[[""] for _ in range(n_split)],
        sys_prompt_array=sys_prompt_array,
        max_workers=10,  # maximum concurrency OpenAI tolerates
        scroller_max_len = 80
    )

    # <-------- Collect results and finish ---------->
    # Report name reflects translation; the original said "polish", a
    # copy-paste slip from the polishing plugin.
    create_report_file_name = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + "-chatgpt.translate.md"
    res = write_results_to_file(gpt_response_collection, file_name=create_report_file_name)
    history = gpt_response_collection
    # NOTE(review): fp is the last file from the read loop above — the message
    # only names one file of the batch; consider a batch-level message.
    chatbot.append((f"{fp}完成了吗?", res))
    yield from update_ui(chatbot=chatbot, history=history)  # refresh UI
|
86 |
+
|
87 |
+
|
88 |
+
|
89 |
+
|
90 |
+
|
91 |
+
@CatchException
def Markdown英译中(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
    """Translate a Markdown project (or a single .md file) from English to Chinese.

    txt is either a folder containing .md files or a path to one .md file;
    the remaining parameters are the standard plugin-framework plumbing.
    """
    # Basic info: purpose and contributor.
    chatbot.append([
        "函数插件功能?",
        "对整个Markdown项目进行翻译。函数插件贡献者: Binary-Husky"])
    yield from update_ui(chatbot=chatbot, history=history)  # refresh UI

    # Probe optional dependency; suggest how to install it when missing.
    try:
        import tiktoken
    except ImportError:  # was a bare except; only the missing import is expected here
        report_execption(chatbot, history,
                         a=f"解析项目: {txt}",
                         b=f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade tiktoken```。")
        yield from update_ui(chatbot=chatbot, history=history)  # refresh UI
        return
    history = []  # clear history to avoid input overflow
    import glob, os
    if os.path.exists(txt):
        project_folder = txt
    else:
        if txt == "": txt = '空空如也的输入栏'
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
        yield from update_ui(chatbot=chatbot, history=history)  # refresh UI
        return
    # Consistency with Markdown中译英: accept a single .md file directly.
    if txt.endswith('.md'):
        file_manifest = [txt]
    else:
        file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.md', recursive=True)]
    if len(file_manifest) == 0:
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.md文件: {txt}")
        yield from update_ui(chatbot=chatbot, history=history)  # refresh UI
        return
    yield from 多文件翻译(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, language='en->zh')
|
123 |
+
|
124 |
+
|
125 |
+
|
126 |
+
|
127 |
+
|
128 |
+
@CatchException
def Markdown中译英(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
    """Translate a Markdown project (or a single .md file) from Chinese to English.

    txt is either a folder containing .md files or a path to one .md file;
    the remaining parameters are the standard plugin-framework plumbing.
    """
    # Basic info: purpose and contributor.
    chatbot.append([
        "函数插件功能?",
        "对整个Markdown项目进行翻译。函数插件贡献者: Binary-Husky"])
    yield from update_ui(chatbot=chatbot, history=history)  # refresh UI

    # Probe optional dependency; suggest how to install it when missing.
    try:
        import tiktoken
    except:
        report_execption(chatbot, history,
                         a=f"解析项目: {txt}",
                         b=f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade tiktoken```。")
        yield from update_ui(chatbot=chatbot, history=history)  # refresh UI
        return

    history = []  # clear history to avoid input overflow

    import glob, os
    if not os.path.exists(txt):
        if txt == "": txt = '空空如也的输入栏'
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
        yield from update_ui(chatbot=chatbot, history=history)  # refresh UI
        return
    project_folder = txt

    # A single .md file is accepted directly; otherwise scan the folder tree.
    file_manifest = [txt] if txt.endswith('.md') else \
        [f for f in glob.glob(f'{project_folder}/**/*.md', recursive=True)]
    if not file_manifest:
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.md文件: {txt}")
        yield from update_ui(chatbot=chatbot, history=history)  # refresh UI
        return

    yield from 多文件翻译(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, language='zh->en')
|
img/README_EN.md
ADDED
@@ -0,0 +1,294 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# ChatGPT Academic Optimization
|
2 |
+
> **Note**
|
3 |
+
>
|
4 |
+
> This English readme is automatically generated by the markdown translation plugin in this project, and may not be 100% correct.
|
5 |
+
>
|
6 |
+
|
7 |
+
|
8 |
+
**If you like this project, please give it a star. If you have come up with more useful academic shortcuts or functional plugins, feel free to open an issue or pull request (to the `dev` branch).**
|
9 |
+
|
10 |
+
> **Note**
|
11 |
+
>
|
12 |
+
> 1. Please note that only function plugins (buttons) marked in **red** support reading files, and some plugins are located in the **dropdown menu** in the plugin area. Additionally, we welcome and process PRs for any new plugins with the **highest priority**!
|
13 |
+
>
|
14 |
+
> 2. The functions of each file in this project are detailed in the self-translation report [self_analysis.md](https://github.com/binary-husky/chatgpt_academic/wiki/chatgpt-academic%E9%A1%B9%E7%9B%AE%E8%87%AA%E8%AF%91%E8%A7%A3%E6%8A%A5%E5%91%8A). With the version iteration, you can click on a relevant function plugin at any time to call GPT to regenerate the self-analysis report for the project. Commonly asked questions are summarized in the [`wiki`](https://github.com/binary-husky/chatgpt_academic/wiki/%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98).
|
15 |
+
>
|
16 |
+
> 3. If you are not used to the function, comments or interface with some Chinese names, you can click on the relevant function plugin at any time to call ChatGPT to generate the source code of the project in English.
|
17 |
+
|
18 |
+
<div align="center">
|
19 |
+
|
20 |
+
Function | Description
|
21 |
+
--- | ---
|
22 |
+
One-click refinement | Supports one-click refinement, one-click searching for grammatical errors in papers.
|
23 |
+
One-click translation between Chinese and English | One-click translation between Chinese and English.
|
24 |
+
One-click code interpretation | Can correctly display and interpret the code.
|
25 |
+
[Custom shortcuts](https://www.bilibili.com/video/BV14s4y1E7jN) | Supports custom shortcuts.
|
26 |
+
[Configure proxy server](https://www.bilibili.com/video/BV1rc411W7Dr) | Supports configuring proxy server.
|
27 |
+
Modular design | Supports custom high-order experimental features and [function plug-ins], and plug-ins support [hot update](https://github.com/binary-husky/chatgpt_academic/wiki/%E5%87%BD%E6%95%B0%E6%8F%92%E4%BB%B6%E6%8C%87%E5%8D%97).
|
28 |
+
[Self-program analysis](https://www.bilibili.com/video/BV1cj411A7VW) | [Function Plug-in] [One-Key Understanding](https://github.com/binary-husky/chatgpt_academic/wiki/chatgpt-academic%E9%A1%B9%E7%9B%AE%E8%87%AA%E8%AF%91%E8%A7%A3%E6%8A%A5%E5%91%8A) the source code of this project.
|
29 |
+
[Program analysis](https://www.bilibili.com/video/BV1cj411A7VW) | [Function Plug-in] One-click can analyze other Python/C/C++/Java/Golang/Lua/Rect project trees.
|
30 |
+
Read papers | [Function Plug-in] One-click reads the full text of a latex paper and generates an abstract.
|
31 |
+
Latex full-text translation/refinement | [Function Plug-in] One-click translates or refines a latex paper.
|
32 |
+
Batch annotation generation | [Function Plug-in] One-click generates function annotations in batches.
|
33 |
+
Chat analysis report generation | [Function Plug-in] Automatically generate summary reports after running.
|
34 |
+
[Arxiv assistant](https://www.bilibili.com/video/BV1LM4y1279X) | [Function Plug-in] Enter the arxiv paper url and you can translate the abstract and download the PDF with one click.
|
35 |
+
[PDF paper full-text translation function](https://www.bilibili.com/video/BV1KT411x7Wn) | [Function Plug-in] Extract title and abstract of PDF papers + translate full text (multi-threaded).
|
36 |
+
[Google Scholar integration assistant](https://www.bilibili.com/video/BV19L411U7ia) (Version>=2.45) | [Function Plug-in] Given any Google Scholar search page URL, let GPT help you choose interesting articles.
|
37 |
+
Formula display | Can simultaneously display the tex form and rendering form of formulas.
|
38 |
+
Image display | Can display images in Markdown.
|
39 |
+
Multithreaded function plug-in support | Supports multi-threaded calling of chatgpt, one-click processing of massive texts or programs.
|
40 |
+
Support for markdown tables output by GPT | Can output markdown tables that support GPT.
|
41 |
+
Start dark gradio theme [theme](https://github.com/binary-husky/chatgpt_academic/issues/173) | Add ```/?__dark-theme=true``` to the browser URL to switch to the dark theme.
|
42 |
+
[Huggingface free scientific online experience](https://huggingface.co/spaces/qingxu98/gpt-academic) | After logging in to Huggingface, copy [this space](https://huggingface.co/spaces/qingxu98/gpt-academic).
|
43 |
+
[Mixed support for multiple LLM models](https://www.bilibili.com/video/BV1EM411K7VH/) ([v3.0 branch](https://github.com/binary-husky/chatgpt_academic/tree/v3.0) in testing) | It must feel great to be served by both ChatGPT and [Tsinghua ChatGLM](https://github.com/THUDM/ChatGLM-6B)!
|
44 |
+
Compatible with [TGUI](https://github.com/oobabooga/text-generation-webui) to access more language models | Access to opt-1.3b, galactica-1.3b and other models ([v3.0 branch](https://github.com/binary-husky/chatgpt_academic/tree/v3.0) under testing).
|
45 |
+
… | ...
|
46 |
+
|
47 |
+
</div>
|
48 |
+
|
49 |
+
<!-- - New interface (left: master branch, right: dev development frontier) -->
|
50 |
+
- New interface (modify the `LAYOUT` option in `config.py` to switch between "left and right layout" and "up and down layout").
|
51 |
+
<div align="center">
|
52 |
+
<img src="https://user-images.githubusercontent.com/96192199/230361456-61078362-a966-4eb5-b49e-3c62ef18b860.gif" width="700" >
|
53 |
+
</div>
|
54 |
+
|
55 |
+
- All buttons are dynamically generated by reading `functional.py`, and custom functions can be added freely, freeing up the clipboard.
|
56 |
+
<div align="center">
|
57 |
+
<img src="公式.gif" width="700" >
|
58 |
+
</div>
|
59 |
+
|
60 |
+
- Refinement/Correction
|
61 |
+
<div align="center">
|
62 |
+
<img src="润色.gif" width="700" >
|
63 |
+
</div>
|
64 |
+
|
65 |
+
- Supports markdown tables output by GPT.
|
66 |
+
<div align="center">
|
67 |
+
<img src="demo2.jpg" width="500" >
|
68 |
+
</div>
|
69 |
+
|
70 |
+
- If the output contains formulas, both the tex form and the rendering form are displayed simultaneously for easy copying and reading.
|
71 |
+
<div align="center">
|
72 |
+
<img src="https://user-images.githubusercontent.com/96192199/230598842-1d7fcddd-815d-40ee-af60-baf488a199df.png" width="700" >
|
73 |
+
</div>
|
74 |
+
|
75 |
+
- Don't want to read project code? Let chatgpt boast about the whole project.
|
76 |
+
<div align="center">
|
77 |
+
<img src="https://user-images.githubusercontent.com/96192199/226935232-6b6a73ce-8900-4aee-93f9-733c7e6fef53.png" width="700" >
|
78 |
+
</div>
|
79 |
+
|
80 |
+
- Multiple large language models mixed calling. ([v3.0 branch](https://github.com/binary-husky/chatgpt_academic/tree/v3.0) in testing)
|
81 |
+
|
82 |
+
|
83 |
+
## Running Directly (Windows, Linux or MacOS)
|
84 |
+
|
85 |
+
### 1. Download the Project
|
86 |
+
```sh
|
87 |
+
git clone https://github.com/binary-husky/chatgpt_academic.git
|
88 |
+
cd chatgpt_academic
|
89 |
+
```
|
90 |
+
|
91 |
+
### 2. Configure API_KEY and Proxy Settings
|
92 |
+
|
93 |
+
In `config.py`, configure the overseas Proxy and OpenAI API KEY, as follows:
|
94 |
+
```
|
95 |
+
1. If you are in China, you need to set an overseas proxy to use the OpenAI API smoothly. Please read the instructions in config.py carefully (1. Modify the USE_PROXY to True; 2. Modify the proxies according to the instructions).
|
96 |
+
2. Configure OpenAI API KEY. You need to register on the OpenAI official website and obtain an API KEY. Once you get the API KEY, configure it in the config.py file.
|
97 |
+
3. Issues related to proxy network (network timeout, proxy not working) are summarized to https://github.com/binary-husky/chatgpt_academic/issues/1
|
98 |
+
```
|
99 |
+
(Note: When the program is running, it will first check whether there is a private configuration file named `config_private.py`, and use the configuration in it to overwrite the same name configuration in `config.py`. Therefore, if you can understand our configuration reading logic, we strongly recommend that you create a new configuration file next to `config.py` named `config_private.py` and transfer (copy) the configuration in `config.py` to `config_private.py`. `config_private.py` is not managed by Git, which can make your privacy information more secure.)
|
100 |
+
|
101 |
+
### 3. Install Dependencies
|
102 |
+
```sh
|
103 |
+
# (Option 1) Recommended
|
104 |
+
python -m pip install -r requirements.txt
|
105 |
+
|
106 |
+
# (Option 2) If you use anaconda, the steps are also similar:
|
107 |
+
# (Option 2.1) conda create -n gptac_venv python=3.11
|
108 |
+
# (Option 2.2) conda activate gptac_venv
|
109 |
+
# (Option 2.3) python -m pip install -r requirements.txt
|
110 |
+
|
111 |
+
# Note: Use the official pip source or the Ali pip source. Other pip sources (such as some university pips) may have problems. Temporary substitution method:
|
112 |
+
# python -m pip install -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple/
|
113 |
+
```
|
114 |
+
|
115 |
+
### 4. Run
|
116 |
+
```sh
|
117 |
+
python main.py
|
118 |
+
```
|
119 |
+
|
120 |
+
### 5. Test Experimental Features
|
121 |
+
```
|
122 |
+
- Test C++ Project Header Analysis
|
123 |
+
In the input area, enter `./crazy_functions/test_project/cpp/libJPG` , and then click "[Experiment] Parse the entire C++ project (input inputs the root path of the project)"
|
124 |
+
- Test Writing Abstracts for Latex Projects
|
125 |
+
In the input area, enter `./crazy_functions/test_project/latex/attention` , and then click "[Experiment] Read the tex paper and write an abstract (input inputs the root path of the project)"
|
126 |
+
- Test Python Project Analysis
|
127 |
+
In the input area, enter `./crazy_functions/test_project/python/dqn` , and then click "[Experiment] Parse the entire py project (input inputs the root path of the project)"
|
128 |
+
- Test Self-code Interpretation
|
129 |
+
Click "[Experiment] Please analyze and deconstruct this project itself"
|
130 |
+
- Test Experimental Function Template (asking GPT what happened in history today), you can implement more complex functions based on this template function
|
131 |
+
Click "[Experiment] Experimental function template"
|
132 |
+
```
|
133 |
+
|
134 |
+
## Use Docker (Linux)
|
135 |
+
|
136 |
+
``` sh
|
137 |
+
# Download Project
|
138 |
+
git clone https://github.com/binary-husky/chatgpt_academic.git
|
139 |
+
cd chatgpt_academic
|
140 |
+
# Configure Overseas Proxy and OpenAI API KEY
|
141 |
+
Configure config.py with any text editor
|
142 |
+
# Installation
|
143 |
+
docker build -t gpt-academic .
|
144 |
+
# Run
|
145 |
+
docker run --rm -it --net=host gpt-academic
|
146 |
+
|
147 |
+
# Test Experimental Features
|
148 |
+
## Test Self-code Interpretation
|
149 |
+
Click "[Experiment] Please analyze and deconstruct this project itself"
|
150 |
+
## Test Experimental Function Template (asking GPT what happened in history today), you can implement more complex functions based on this template function
|
151 |
+
Click "[Experiment] Experimental function template"
|
152 |
+
## (Please note that when running in docker, you need to pay extra attention to file access rights issues of the program.)
|
153 |
+
## Test C++ Project Header Analysis
|
154 |
+
In the input area, enter ./crazy_functions/test_project/cpp/libJPG , and then click "[Experiment] Parse the entire C++ project (input inputs the root path of the project)"
|
155 |
+
## Test Writing Abstracts for Latex Projects
|
156 |
+
In the input area, enter ./crazy_functions/test_project/latex/attention , and then click "[Experiment] Read the tex paper and write an abstract (input inputs the root path of the project)"
|
157 |
+
## Test Python Project Analysis
|
158 |
+
In the input area, enter ./crazy_functions/test_project/python/dqn , and then click "[Experiment] Parse the entire py project (input inputs the root path of the project)"
|
159 |
+
|
160 |
+
```
|
161 |
+
|
162 |
+
## Other Deployment Methods
|
163 |
+
- Use WSL2 (Windows Subsystem for Linux subsystem)
|
164 |
+
Please visit [Deploy Wiki-1](https://github.com/binary-husky/chatgpt_academic/wiki/%E4%BD%BF%E7%94%A8WSL2%EF%BC%88Windows-Subsystem-for-Linux-%E5%AD%90%E7%B3%BB%E7%BB%9F%EF%BC%89%E9%83%A8%E7%BD%B2)
|
165 |
+
|
166 |
+
- nginx remote deployment
|
167 |
+
Please visit [Deploy Wiki-2](https://github.com/binary-husky/chatgpt_academic/wiki/%E8%BF%9C%E7%A8%8B%E9%83%A8%E7%BD%B2%E7%9A%84%E6%8C%87%E5%AF%BC)
|
168 |
+
|
169 |
+
|
170 |
+
## Customizing New Convenient Buttons (Academic Shortcut Key Customization)
|
171 |
+
Open functional.py and add the entry as follows, and then restart the program. (If the button has been successfully added and is visible, both the prefix and suffix support hot modification and take effect without restarting the program.)
|
172 |
+
|
173 |
+
For example,
|
174 |
+
```
|
175 |
+
"Super English to Chinese Translation": {
|
176 |
+
|
177 |
+
# Prefix, which will be added before your input. For example, it is used to describe your requirements, such as translation, code interpretation, polishing, etc.
|
178 |
+
"Prefix": "Please translate the following content into Chinese, and then use a markdown table to explain each proprietary term in the text:\n\n",
|
179 |
+
|
180 |
+
# Suffix, which will be added after your input. For example, in conjunction with the prefix, you can bracket your input in quotes.
|
181 |
+
"Suffix": "",
|
182 |
+
|
183 |
+
},
|
184 |
+
```
|
185 |
+
<div align="center">
|
186 |
+
<img src="https://user-images.githubusercontent.com/96192199/226899272-477c2134-ed71-4326-810c-29891fe4a508.png" width="500" >
|
187 |
+
</div>
|
188 |
+
|
189 |
+
|
190 |
+
If you invent a more user-friendly academic shortcut key, welcome to post an issue or pull request!
|
191 |
+
|
192 |
+
## Configure Proxy
|
193 |
+
### Method 1: General Method
|
194 |
+
Modify the port and proxy software corresponding in ```config.py```
|
195 |
+
|
196 |
+
<div align="center">
|
197 |
+
<img src="https://user-images.githubusercontent.com/96192199/226571294-37a47cd9-4d40-4c16-97a2-d360845406f7.png" width="500" >
|
198 |
+
<img src="https://user-images.githubusercontent.com/96192199/226838985-e5c95956-69c2-4c23-a4dd-cd7944eeb451.png" width="500" >
|
199 |
+
</div>
|
200 |
+
|
201 |
+
|
202 |
+
After configuring, you can use the following command to test whether the proxy works. If everything is normal, the code below will output the location of your proxy server:
|
203 |
+
|
204 |
+
```
|
205 |
+
python check_proxy.py
|
206 |
+
```
|
207 |
+
|
208 |
+
### Method Two: Pure Beginner Tutorial
|
209 |
+
[Pure Beginner Tutorial](https://github.com/binary-husky/chatgpt_academic/wiki/%E4%BB%A3%E7%90%86%E8%BD%AF%E4%BB%B6%E9%97%AE%E9%A2%98%E7%9A%84%E6%96%B0%E6%89%8B%E8%A7%A3%E5%86%B3%E6%96%B9%E6%B3%95%EF%BC%88%E6%96%B9%E6%B3%95%E5%8F%AA%E9%80%82%E7%94%A8%E4%BA%8E%E6%96%B0%E6%89%8B%EF%BC%89)
|
210 |
+
|
211 |
+
## Compatibility Testing
|
212 |
+
|
213 |
+
### Image Display:
|
214 |
+
|
215 |
+
<div align="center">
|
216 |
+
<img src="https://user-images.githubusercontent.com/96192199/228737599-bf0a9d9c-1808-4f43-ae15-dfcc7af0f295.png" width="800" >
|
217 |
+
</div>
|
218 |
+
|
219 |
+
|
220 |
+
### If the program can read and analyze itself:
|
221 |
+
|
222 |
+
<div align="center">
|
223 |
+
<img src="https://user-images.githubusercontent.com/96192199/226936850-c77d7183-0749-4c1c-9875-fd4891842d0c.png" width="800" >
|
224 |
+
</div>
|
225 |
+
|
226 |
+
<div align="center">
|
227 |
+
<img src="https://user-images.githubusercontent.com/96192199/226936618-9b487e4b-ab5b-4b6e-84c6-16942102e917.png" width="800" >
|
228 |
+
</div>
|
229 |
+
|
230 |
+
### Any other Python/Cpp project analysis:
|
231 |
+
<div align="center">
|
232 |
+
<img src="https://user-images.githubusercontent.com/96192199/226935232-6b6a73ce-8900-4aee-93f9-733c7e6fef53.png" width="800" >
|
233 |
+
</div>
|
234 |
+
|
235 |
+
<div align="center">
|
236 |
+
<img src="https://user-images.githubusercontent.com/96192199/226969067-968a27c1-1b9c-486b-8b81-ab2de8d3f88a.png" width="800" >
|
237 |
+
</div>
|
238 |
+
|
239 |
+
### Latex paper reading comprehension and abstract generation with one click
|
240 |
+
<div align="center">
|
241 |
+
<img src="https://user-images.githubusercontent.com/96192199/227504406-86ab97cd-f208-41c3-8e4a-7000e51cf980.png" width="800" >
|
242 |
+
</div>
|
243 |
+
|
244 |
+
### Automatic Report Generation
|
245 |
+
<div align="center">
|
246 |
+
<img src="https://user-images.githubusercontent.com/96192199/227503770-fe29ce2c-53fd-47b0-b0ff-93805f0c2ff4.png" height="300" >
|
247 |
+
<img src="https://user-images.githubusercontent.com/96192199/227504617-7a497bb3-0a2a-4b50-9a8a-95ae60ea7afd.png" height="300" >
|
248 |
+
<img src="https://user-images.githubusercontent.com/96192199/227504005-efeaefe0-b687-49d0-bf95-2d7b7e66c348.png" height="300" >
|
249 |
+
</div>
|
250 |
+
|
251 |
+
### Modular Function Design
|
252 |
+
<div align="center">
|
253 |
+
<img src="https://user-images.githubusercontent.com/96192199/229288270-093643c1-0018-487a-81e6-1d7809b6e90f.png" height="400" >
|
254 |
+
<img src="https://user-images.githubusercontent.com/96192199/227504931-19955f78-45cd-4d1c-adac-e71e50957915.png" height="400" >
|
255 |
+
</div>
|
256 |
+
|
257 |
+
|
258 |
+
### Translating source code to English
|
259 |
+
|
260 |
+
<div align="center">
|
261 |
+
<img src="https://user-images.githubusercontent.com/96192199/229720562-fe6c3508-6142-4635-a83d-21eb3669baee.png" height="400" >
|
262 |
+
</div>
|
263 |
+
|
264 |
+
## Todo and Version Planning:
|
265 |
+
|
266 |
+
- version 3 (Todo):
|
267 |
+
- - Support for gpt4 and other llm
|
268 |
+
- version 2.4+ (Todo):
|
269 |
+
- - Summary of long text and token overflow problems in large project source code
|
270 |
+
- - Implementation of project packaging and deployment
|
271 |
+
- - Function plugin parameter interface optimization
|
272 |
+
- - Self-updating
|
273 |
+
- version 2.4: (1) Added PDF full-text translation function; (2) Added input area switching function; (3) Added vertical layout option; (4) Optimized multi-threaded function plugin.
|
274 |
+
- version 2.3: Enhanced multi-threaded interactivity
|
275 |
+
- version 2.2: Function plug-in supports hot reloading
|
276 |
+
- version 2.1: Collapsible layout
|
277 |
+
- version 2.0: Introduction of modular function plugins
|
278 |
+
- version 1.0: Basic functions
|
279 |
+
|
280 |
+
## References and Learning
|
281 |
+
|
282 |
+
|
283 |
+
```
|
284 |
+
The code refers to the design of many other excellent projects, mainly including:
|
285 |
+
|
286 |
+
# Reference Project 1: Referenced the method of reading OpenAI json, recording historical inquiry records, and using gradio queue in ChuanhuChatGPT
|
287 |
+
https://github.com/GaiZhenbiao/ChuanhuChatGPT
|
288 |
+
|
289 |
+
# Reference Project 2:
|
290 |
+
https://github.com/THUDM/ChatGLM-6B
|
291 |
+
|
292 |
+
```
|
293 |
+
|
294 |
+
|