3v324v23 committed
Commit 93c13aa
1 Parent(s): f238a34

better traceback

check_proxy.py CHANGED
@@ -1,8 +1,4 @@
 
-"""
-Me: use Python's requests library to look up the location of this machine's IP address
-ChatGPT:
-"""
 def check_proxy(proxies):
     import requests
     proxies_https = proxies['https'] if proxies is not None else '无'
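Editor's note: check_proxy takes the same requests-style proxies dict used elsewhere in this repo; a minimal usage sketch (the proxy address is a placeholder and the return value is assumed to be a printable status, as the new toolbox.py interpolates it into an f-string):

from check_proxy import check_proxy

# Hypothetical proxies dict in the layout the function expects; the address
# below is a placeholder, not taken from this commit.
proxies = {
    "http":  "socks5h://localhost:1080",
    "https": "socks5h://localhost:1080",
}

check_proxy(proxies)   # reports proxies['https'] as part of its status
check_proxy(None)      # None is tolerated: the https field is shown as '无' ("none")
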
crazy_functions/读文章写摘要.py CHANGED
@@ -1,25 +1,8 @@
-from functools import wraps
 from predict import predict_no_ui
+from toolbox import CatchException, report_execption
 fast_debug = False
 
 
-def report_execption(chatbot, history, a, b):
-    chatbot.append((a, b))
-    history.append(a); history.append(b)
-
-# Catch exceptions that cannot be anticipated
-def CatchException(f):
-    @wraps(f)
-    def decorated(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
-        try:
-            yield from f(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT)
-        except Exception as e:
-            import traceback
-            tb_str = traceback.format_exc()
-            chatbot[-1] = (chatbot[-1][0], f"[Local Message] something error occured: \n {tb_str}")
-            yield chatbot, history, f'异常 {e}'
-    return decorated
-
 def 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt):
     import time, glob, os
     print('begin analysis on:', file_manifest)
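Editor's note: the helpers removed above now live in toolbox.py. A small sketch of what report_execption does with its arguments, based on its definition in this commit (the message strings are invented for the example):

from toolbox import report_execption

chatbot, history = [], []
# report_execption records an error as one more chat turn: (a, b) becomes the
# displayed question/answer pair, and both strings are appended to history.
report_execption(chatbot, history,
                 a="Parse project: ./demo_folder",        # hypothetical request text
                 b="No matching .tex files were found.")  # hypothetical error text

assert chatbot == [("Parse project: ./demo_folder", "No matching .tex files were found.")]
assert history == ["Parse project: ./demo_folder", "No matching .tex files were found."]
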
functional_crazy.py CHANGED
@@ -1,18 +1,7 @@
-from functools import wraps
 from predict import predict_no_ui
+from toolbox import CatchException, report_execption
 fast_debug = False
 
-def CatchException(f):
-    @wraps(f)
-    def decorated(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
-        try:
-            yield from f(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT)
-        except Exception as e:
-            import traceback
-            tb_str = traceback.format_exc()
-            chatbot[-1] = (chatbot[-1][0], f"[Local Message] something error occured: \n {tb_str}")
-            yield chatbot, history, f'异常 {e}'
-    return decorated
 
 
 @CatchException
@@ -66,12 +55,6 @@ def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTx
     history.append(i_say); history.append(gpt_say)
     yield chatbot, history, '正常'
 
-def report_execption(chatbot, history, a, b):
-    chatbot.append((a, b))
-    history.append(a); history.append(b)
-
-
-
 
 
 def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt):
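Editor's note: every handler in this file shares the exact positional signature that CatchException forwards, which is what lets one decorator wrap all of the experimental functions. A hedged sketch of that contract (the handler below is invented for illustration; the '正常'/'异常' status strings follow the repo's convention):

from toolbox import CatchException

@CatchException
def demo_function(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
    # Hypothetical experimental function: it streams UI updates by yielding
    # (chatbot, history, status) tuples, then fails part-way through.
    chatbot.append((txt, "working on it..."))
    yield chatbot, history, '正常'
    raise RuntimeError("demo failure")

chatbot, history = [], []
for cb, hist, status in demo_function("hello", 1.0, 1.0, chatbot, history, "", -1):
    # First iteration reports '正常'; after the exception, CatchException rewrites
    # the last chat message to include the formatted traceback plus proxy status,
    # then yields a final '异常 ...' status instead of crashing the UI.
    print(status)
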
main.py CHANGED
@@ -1,20 +1,11 @@
 import os; os.environ['no_proxy'] = '*'
 import gradio as gr
-import markdown, mdtex2html
 from predict import predict
-from show_math import convert as convert_math
+from toolbox import format_io, find_free_port
 
 try: from config_private import proxies, WEB_PORT # put your own secrets such as the API key and proxy address here; os.path.exists('config_private.py')
 except: from config import proxies, WEB_PORT
 
-def find_free_port():
-    import socket
-    from contextlib import closing
-    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
-        s.bind(('', 0))
-        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
-        return s.getsockname()[1]
-
 PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
 
 initial_prompt = "Serve me as a writing and programming assistant."
@@ -33,41 +24,6 @@ functional = get_functionals()
 from functional_crazy import get_crazy_functionals
 crazy_functional = get_crazy_functionals()
 
-def reset_textbox(): return gr.update(value='')
-
-def text_divide_paragraph(text):
-    if '```' in text:
-        # careful input
-        return text
-    else:
-        # wtf input
-        lines = text.split("\n")
-        for i, line in enumerate(lines):
-            if i!=0: lines[i] = "<p>"+lines[i].replace(" ", "&nbsp;")+"</p>"
-        text = "".join(lines)
-        return text
-
-def markdown_convertion(txt):
-    if ('$' in txt) and ('```' not in txt):
-        math_config = {'mdx_math': {'enable_dollar_delimiter': True}}
-        return markdown.markdown(txt,extensions=['fenced_code','tables']) + '<br><br>' + \
-            markdown.markdown(convert_math(txt, splitParagraphs=False),extensions=['fenced_code','tables'])
-    else:
-        return markdown.markdown(txt,extensions=['fenced_code','tables'])
-
-
-def format_io(self,y):
-    if y is None:
-        return []
-    i_ask, gpt_reply = y[-1]
-
-    i_ask = text_divide_paragraph(i_ask) # the user input is free-form, so preprocess it a bit
-
-    y[-1] = (
-        None if i_ask is None else markdown.markdown(i_ask, extensions=['fenced_code','tables']),
-        None if gpt_reply is None else markdown_convertion(gpt_reply)
-    )
-    return y
 gr.Chatbot.postprocess = format_io
 
 with gr.Blocks() as demo:
@@ -103,7 +59,6 @@ with gr.Blocks() as demo:
 
     txt.submit(predict, [txt, top_p, temperature, chatbot, history, systemPromptTxt], [chatbot, history, statusDisplay])
     submitBtn.click(predict, [txt, top_p, temperature, chatbot, history, systemPromptTxt], [chatbot, history, statusDisplay], show_progress=True)
-    # submitBtn.click(reset_textbox, [], [txt])
     for k in functional:
        functional[k]["Button"].click(predict,
            [txt, top_p, temperature, chatbot, history, systemPromptTxt, TRUE, gr.State(k)], [chatbot, history, statusDisplay], show_progress=True)
@@ -111,15 +66,15 @@ with gr.Blocks() as demo:
        crazy_functional[k]["Button"].click(crazy_functional[k]["Function"],
            [txt, top_p, temperature, chatbot, history, systemPromptTxt, gr.State(PORT)], [chatbot, history, statusDisplay])
 
-print(f"URL http://localhost:{PORT}")
-demo.title = "ChatGPT 学术优化"
 
 def auto_opentab_delay():
     import threading, webbrowser, time
+    print(f"URL http://localhost:{PORT}")
     def open(): time.sleep(2)
     webbrowser.open_new_tab(f'http://localhost:{PORT}')
     t = threading.Thread(target=open)
     t.daemon = True; t.start()
 
 auto_opentab_delay()
+demo.title = "ChatGPT 学术优化"
 demo.queue().launch(server_name="0.0.0.0", share=True, server_port=PORT)
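Editor's note: the port-selection logic in main.py is unchanged, it just imports find_free_port from toolbox now. A minimal sketch of the convention, with WEB_PORT = -1 assumed to stand in for the config default:

from toolbox import find_free_port

WEB_PORT = -1                                           # assumed config value: <= 0 means "pick any free port"
PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT  # same expression main.py uses
print(f"URL http://localhost:{PORT}")
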
toolbox.py ADDED
@@ -0,0 +1,68 @@
+import markdown, mdtex2html
+from show_math import convert as convert_math
+from functools import wraps
+
+
+def regular_txt_to_markdown(text):
+    text = text.replace('\n', '\n\n')
+    text = text.replace('\n\n\n', '\n\n')
+    text = text.replace('\n\n\n', '\n\n')
+    return text
+
+def CatchException(f):
+    @wraps(f)
+    def decorated(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
+        try:
+            yield from f(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT)
+        except Exception as e:
+            import traceback
+            from check_proxy import check_proxy
+            try: from config_private import proxies
+            except: from config import proxies
+            tb_str = regular_txt_to_markdown(traceback.format_exc())
+            chatbot[-1] = (chatbot[-1][0], f"[Local Message] 实验性函数调用出错: \n\n {tb_str} \n\n 当前代理可用性: \n\n {check_proxy(proxies)}")
+            yield chatbot, history, f'异常 {e}'
+    return decorated
+
+def report_execption(chatbot, history, a, b):
+    chatbot.append((a, b))
+    history.append(a); history.append(b)
+
+def text_divide_paragraph(text):
+    if '```' in text:
+        # careful input
+        return text
+    else:
+        # wtf input
+        lines = text.split("\n")
+        for i, line in enumerate(lines):
+            if i!=0: lines[i] = "<p>"+lines[i].replace(" ", "&nbsp;")+"</p>"
+        text = "".join(lines)
+        return text
+
+def markdown_convertion(txt):
+    if ('$' in txt) and ('```' not in txt):
+        return markdown.markdown(txt,extensions=['fenced_code','tables']) + '<br><br>' + \
+            markdown.markdown(convert_math(txt, splitParagraphs=False),extensions=['fenced_code','tables'])
+    else:
+        return markdown.markdown(txt,extensions=['fenced_code','tables'])
+
+
+def format_io(self, y):
+    if y is None: return []
+    i_ask, gpt_reply = y[-1]
+    i_ask = text_divide_paragraph(i_ask) # the user input is free-form, so preprocess it a bit
+    y[-1] = (
+        None if i_ask is None else markdown.markdown(i_ask, extensions=['fenced_code','tables']),
+        None if gpt_reply is None else markdown_convertion(gpt_reply)
+    )
+    return y
+
+
+def find_free_port():
+    import socket
+    from contextlib import closing
+    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
+        s.bind(('', 0))
+        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        return s.getsockname()[1]
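Editor's note: this new module is what delivers the "better traceback". regular_txt_to_markdown doubles each newline (and collapses the resulting runs of blank lines) so a raw traceback keeps its line breaks once it is rendered as Markdown in the chatbot. A short sketch of that behavior:

from toolbox import regular_txt_to_markdown

tb_like = "Traceback (most recent call last):\n  File \"demo.py\", line 1\nValueError: demo"
print(regular_txt_to_markdown(tb_like))
# Each original line is now followed by a blank line, so the Markdown renderer
# keeps the traceback's line structure instead of folding it into one paragraph.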