qingxu99 committed
Commit 84fc864
1 parent: a554b7f

Fix the environment dependencies for MOSS and ChatGLM

docs/GithubAction+ChatGLM+Moss CHANGED
@@ -3,7 +3,7 @@
 FROM nvidia/cuda:11.3.1-runtime-ubuntu20.04
 ARG useProxyNetwork=''
 RUN apt-get update
-RUN apt-get install -y curl proxychains curl
+RUN apt-get install -y curl proxychains curl gcc
 RUN apt-get install -y git python python3 python-dev python3-dev --fix-missing


@@ -21,12 +21,7 @@ RUN python3 -m pip install -r request_llm/requirements_moss.txt
 RUN python3 -m pip install -r request_llm/requirements_chatglm.txt
 RUN python3 -m pip install -r request_llm/requirements_newbing.txt

-# # Warm up the ChatGLM parameters (optional, not required)
-# RUN echo ' \n\
-# from transformers import AutoModel, AutoTokenizer \n\
-# chatglm_tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True) \n\
-# chatglm_model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).float() ' >> warm_up_chatglm.py
-# RUN python3 -u warm_up_chatglm.py
+

 # Warm up the Tiktoken module
 RUN python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()'
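For reference, the six commented-out lines removed above were generating and running a small warm-up script so the ChatGLM checkpoint would be downloaded at image-build time rather than on the first chat request. A minimal sketch of that idea as a standalone file, using only the model name and flags visible in the removed lines (it assumes the transformers package from requirements_chatglm.txt and network access to the Hugging Face Hub during the build):

```python
# warm_up_chatglm.py — optional pre-download of the ChatGLM checkpoint at build time,
# so the first chat request does not have to fetch the weights over the network.
from transformers import AutoModel, AutoTokenizer

# trust_remote_code=True is required because THUDM/chatglm-6b ships custom modeling code.
chatglm_tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)

# .float() casts the weights to float32 so the warm-up can run on CPU during the build.
chatglm_model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).float()
```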
request_llm/bridge_chatglm.py CHANGED
@@ -68,7 +68,8 @@ class GetGLMHandle(Process):
                 # command = self.child.recv()
                 # if command == '[Terminate]': break
             except:
-                self.child.send('[Local Message] Call ChatGLM fail.')
+                from toolbox import trimmed_format_exc
+                self.child.send('[Local Message] Call ChatGLM fail.' + '\n```\n' + trimmed_format_exc() + '\n```\n')
             # Request handled; start the next loop iteration
             self.child.send('[Finish]')

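The new except branch appends the formatted traceback, wrapped in markdown code fences so the front end renders it verbatim, instead of sending only the bare "Call ChatGLM fail." string. The implementation of toolbox.trimmed_format_exc is not part of this diff; a hypothetical stand-in with the same shape (no arguments, returns the current traceback as a string), based on the standard traceback module, might look like this:

```python
import traceback

def trimmed_format_exc() -> str:
    """Hypothetical stand-in for toolbox.trimmed_format_exc.

    Returns the current exception's traceback as a string; the real helper
    presumably also trims installation-specific noise, which is skipped here.
    """
    return traceback.format_exc()

# Usage mirrors the added lines above, inside the except: block of GetGLMHandle.run:
#     self.child.send('[Local Message] Call ChatGLM fail.' + '\n```\n' + trimmed_format_exc() + '\n```\n')
```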