Implement configuration options for the Claude chat feature
- config.py +6 -1
- request_llm/bridge_all.py +12 -0
config.py CHANGED
@@ -46,7 +46,7 @@ MAX_RETRY = 2
 
 # OpenAI model selection (gpt-4 is currently only open to approved applicants; to try gpt-4 you can use api2d)
 LLM_MODEL = "gpt-3.5-turbo" # options ↓↓↓
-AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "api2d-gpt-3.5-turbo", "gpt-4", "api2d-gpt-4", "chatglm", "moss", "newbing"]
+AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "api2d-gpt-3.5-turbo", "gpt-4", "api2d-gpt-4", "chatglm", "moss", "newbing", "claude"]
 
 # Execution device for local LLM models such as ChatGLM: CPU/GPU
 LOCAL_MODEL_DEVICE = "cpu" # or "cuda"
@@ -75,3 +75,8 @@ NEWBING_STYLE = "creative" # ["creative", "balanced", "precise"]
 NEWBING_COOKIES = """
 your bing cookies here
 """
+
+# slack-claude bot
+# See https://zhuanlan.zhihu.com/p/627485689 for details on how to fill in the IDs below
+CLAUDE_BOT_ID = ''
+SLACK_USER_TOKEN = ''
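The two new keys configure access to Claude through a Slack workspace that has the Claude app installed: CLAUDE_BOT_ID is presumably the bot's Slack member ID and SLACK_USER_TOKEN a user OAuth token for that workspace (see the linked article). The sketch below shows one way these values could be consumed; the slack_sdk calls and the polling loop are illustrative assumptions, not the code this commit adds in request_llm/bridge_claude.py.

import time
from slack_sdk import WebClient                # assumed dependency for talking to Slack
from config import CLAUDE_BOT_ID, SLACK_USER_TOKEN

def ask_claude_via_slack(prompt: str, timeout_s: int = 120) -> str:
    """Post a prompt to the Claude bot over Slack DM and poll for its reply (sketch)."""
    client = WebClient(token=SLACK_USER_TOKEN)
    # Open (or reuse) a direct-message channel with the Claude bot.
    channel = client.conversations_open(users=CLAUDE_BOT_ID)["channel"]["id"]
    sent = client.chat_postMessage(channel=channel, text=prompt)
    sent_ts = float(sent["ts"])
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        time.sleep(2)
        history = client.conversations_history(channel=channel, limit=5)
        # Accept the first message authored by the bot after our prompt was posted.
        for msg in history["messages"]:
            if msg.get("user") == CLAUDE_BOT_ID and float(msg["ts"]) > sent_ts:
                return msg["text"]
    raise TimeoutError("No reply from the Claude bot within the timeout")

The real bridge wraps this kind of exchange in the project's predict / predict_no_ui_long_connection interface so that Claude can be dispatched like any other model, as the bridge_all.py change below shows.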
request_llm/bridge_all.py CHANGED
@@ -22,6 +22,9 @@ from .bridge_chatglm import predict as chatglm_ui
 from .bridge_newbing import predict_no_ui_long_connection as newbing_noui
 from .bridge_newbing import predict as newbing_ui
 
+from .bridge_claude import predict_no_ui_long_connection as claude_noui
+from .bridge_claude import predict as claude_ui
+
 # from .bridge_tgui import predict_no_ui_long_connection as tgui_noui
 # from .bridge_tgui import predict as tgui_ui
 
@@ -130,6 +133,15 @@ model_info = {
         "tokenizer": tokenizer_gpt35,
         "token_cnt": get_token_num_gpt35,
     },
+    # claude
+    "claude": {
+        "fn_with_ui": claude_ui,
+        "fn_without_ui": claude_noui,
+        "endpoint": None,
+        "max_token": 4096,
+        "tokenizer": tokenizer_gpt35,
+        "token_cnt": get_token_num_gpt35,
+    },
 }
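The model_info entry is what the rest of the project uses to dispatch a request once "claude" is selected via LLM_MODEL / AVAIL_LLM_MODELS. A rough sketch of that lookup follows; the keyword arguments passed to fn_without_ui are assumptions based on the imported aliases, not an exact copy of any caller in this repository.

from request_llm.bridge_all import model_info

def dispatch_no_ui(model: str, inputs: str, history: list, sys_prompt: str = "") -> str:
    """Look up the registered backend for `model` and call its non-UI entry point (sketch)."""
    info = model_info[model]                      # e.g. model == "claude"
    # The registered token counter lets callers budget the model's context window.
    if info["token_cnt"](inputs) > info["max_token"]:
        raise ValueError(f"input exceeds {info['max_token']} tokens for {model!r}")
    # "fn_without_ui" is the long-connection entry point (claude_noui for Claude);
    # its exact signature is assumed here for illustration.
    return info["fn_without_ui"](inputs, llm_kwargs={"llm_model": model},
                                 history=history, sys_prompt=sys_prompt)

# Usage (assuming CLAUDE_BOT_ID and SLACK_USER_TOKEN have been filled in config.py):
# reply = dispatch_no_ui("claude", "Summarize this repository", history=[])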