yuxiaoyuan0406 committed
Commit f0ff1f2
1 Parent(s): 7dd73e1

Add CUSTOM_PATH to deploy under a sub-path

Files changed (2)
  1. config.py +2 -0
  2. main.py +15 -14
config.py CHANGED
@@ -56,3 +56,5 @@ CONCURRENT_COUNT = 100
 # Set a username and password (this feature is unstable; it depends on the gradio version and the network, and is not recommended if you only use the app locally)
 # [("username", "password"), ("username2", "password2"), ...]
 AUTHENTICATION = []
+
+CUSTOM_PATH = "/"
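
The new key defaults to the root path, so existing deployments keep their behavior. To actually serve the UI under a sub-path (the scenario this commit enables), the value has to be overridden, typically in the config_private.py copy that main.py already recommends for local settings, assuming get_conf resolves overrides from config_private.py the same way it does for the other keys. A minimal sketch under that assumption; the "/chatgpt" value is only an illustration, carried over from the path that used to be hard-coded in main.py:

# config_private.py -- illustrative override, not part of this commit
# The value must start with "/" because it is passed straight to
# gr.mount_gradio_app as the mount point.
CUSTOM_PATH = "/chatgpt"   # UI becomes reachable at http://<host>:<PORT>/chatgpt
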
main.py CHANGED
@@ -1,11 +1,11 @@
 import os; os.environ['no_proxy'] = '*'  # avoid unexpected pollution from proxy settings
 import gradio as gr
 from request_llm.bridge_chatgpt import predict
-from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper, DummyWith
+from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper, custom_path_check, DummyWith
 
 # It is recommended to copy config.py into a config_private.py for your secrets, such as API keys and proxy URLs, so they are not accidentally pushed to GitHub for others to see
-proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT, LAYOUT, API_KEY = \
-    get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION', 'CHATBOT_HEIGHT', 'LAYOUT', 'API_KEY')
+proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT, LAYOUT, API_KEY, CUSTOM_PATH = \
+    get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION', 'CHATBOT_HEIGHT', 'LAYOUT', 'API_KEY', 'CUSTOM_PATH')
 
 # If WEB_PORT is -1, a random free web port is picked
 PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
@@ -171,21 +171,22 @@ def auto_opentab_delay():
     threading.Thread(target=auto_update, name="self-upgrade", daemon=True).start()
 
 auto_opentab_delay()
-# demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", server_port=PORT, auth=AUTHENTICATION)
 demo.queue(concurrency_count=CONCURRENT_COUNT)
 
-CUSTOM_PATH = '/chatgpt'
+if custom_path_check(CUSTOM_PATH):
+    import uvicorn
+    from fastapi import FastAPI
 
-import uvicorn
-from fastapi import FastAPI
+    app = FastAPI()
 
-app = FastAPI()
+    @app.get("/")
+    def read_main():
+        return {"message": "NULL"}
 
-@app.get("/")
-def read_main():
-    return {"message": "NULL"}
+    app = gr.mount_gradio_app(app, demo, path=CUSTOM_PATH)
 
-app = gr.mount_gradio_app(app, demo, path=CUSTOM_PATH)
-
-if __name__ == '__main__':
     uvicorn.run(app, host="0.0.0.0", port=PORT)
+else:
+    demo.launch(server_name="0.0.0.0", server_port=PORT, auth=AUTHENTICATION)
+
+
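
For readers who want to try the sub-path technique outside this repository, the sketch below reproduces the pattern the new branch relies on: wrap a Gradio Blocks app in a FastAPI application, mount it under a sub-path with gr.mount_gradio_app, and serve it with uvicorn. It is a minimal, self-contained example, not the project's code: build_demo and is_sub_path are made-up names, and is_sub_path only approximates what toolbox.custom_path_check presumably does, since the diff imports that helper but never shows its body.

# Minimal sketch of serving a Gradio app under a sub-path.
# Assumes gradio, fastapi and uvicorn are installed; all names below are illustrative.
import gradio as gr
import uvicorn
from fastapi import FastAPI

CUSTOM_PATH = "/chatgpt"   # would normally come from config.py / config_private.py
PORT = 7860


def is_sub_path(path: str) -> bool:
    # Rough stand-in for toolbox.custom_path_check (not shown in this commit):
    # treat anything other than the bare root "/" as a sub-path deployment.
    return path.startswith("/") and path != "/"


def build_demo() -> gr.Blocks:
    # Placeholder UI standing in for the real chat interface built in main.py.
    with gr.Blocks() as demo:
        gr.Markdown("Hello from a sub-path deployment")
    return demo


demo = build_demo()

if is_sub_path(CUSTOM_PATH):
    app = FastAPI()

    @app.get("/")
    def read_main():
        # The root path only answers with a stub; the UI lives under CUSTOM_PATH.
        return {"message": "NULL"}

    app = gr.mount_gradio_app(app, demo, path=CUSTOM_PATH)
    uvicorn.run(app, host="0.0.0.0", port=PORT)
else:
    # Fall back to Gradio's own server when no sub-path is configured.
    demo.launch(server_name="0.0.0.0", server_port=PORT)

With CUSTOM_PATH = "/chatgpt" the interface is served at http://localhost:7860/chatgpt, while http://localhost:7860/ only returns the JSON stub, which mirrors the behavior of the modified main.py.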