	change claude model name to stack-claude
Files changed:
- config.py +6 -6
- request_llm/README.md +25 -0
- request_llm/bridge_all.py +15 -14
- request_llm/bridge_newbing.py +1 -1
- request_llm/{bridge_claude.py → bridge_stackclaude.py} +5 -7

config.py (CHANGED)

@@ -44,9 +44,10 @@ WEB_PORT = -1
 # 如果OpenAI不响应(网络卡顿、代理失败、KEY失效),重试的次数限制
 MAX_RETRY = 2
 
-#
+# 模型选择是
 LLM_MODEL = "gpt-3.5-turbo" # 可选 ↓↓↓
-AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "api2d-gpt-3.5-turbo", "gpt-4", "api2d-gpt-4", "chatglm", "moss", "newbing", "claude"]
+AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "api2d-gpt-3.5-turbo", "gpt-4", "api2d-gpt-4", "chatglm", "moss", "newbing", "stack-claude"]
+# P.S. 其他可用的模型还包括 ["jittorllms_rwkv", "jittorllms_pangualpha", "jittorllms_llama"]
 
 # 本地LLM模型如ChatGLM的执行方式 CPU/GPU
 LOCAL_MODEL_DEVICE = "cpu" # 可选 "cuda"
@@ -76,7 +77,6 @@ NEWBING_COOKIES = """
 your bing cookies here
 """
 
-#
-
-
-SLACK_USER_TOKEN = ''
+# Slack Claude bot, 使用教程详情见 request_llm/README.md
+SLACK_CLAUDE_BOT_ID = ''
+SLACK_CLAUDE_USER_TOKEN = ''
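For context, the renamed entry only takes effect if "stack-claude" stays in AVAIL_LLM_MODELS and the two new Slack keys are filled in. A minimal user-side sketch of the relevant config.py lines, with placeholder values that are illustrative rather than real credentials:

```python
# User-side sketch of the relevant config.py entries (placeholder values only).
LLM_MODEL = "gpt-3.5-turbo"        # model selected by default in the UI
AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "newbing", "stack-claude"]  # "stack-claude" must be listed
                                                                 # for the bridge to be registered

# Slack Claude bot, see request_llm/README.md for how to obtain these values.
SLACK_CLAUDE_BOT_ID = "U01ABCDEFGH"   # placeholder: member ID of the Claude app in your workspace
SLACK_CLAUDE_USER_TOKEN = "xoxp-..."  # placeholder: Slack user token
```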
    	
request_llm/README.md (CHANGED)

@@ -13,6 +13,31 @@ LLM_MODEL = "chatglm"
 `python main.py`
 ```
 
+## Claude-Stack
+
+- 请参考此教程获取  https://zhuanlan.zhihu.com/p/627485689
+    - 1、SLACK_CLAUDE_BOT_ID
+    - 2、SLACK_CLAUDE_USER_TOKEN
+
+- 把token加入config.py
+
+## Newbing
+
+- 使用cookie editor获取cookie(json)
+- 把cookie(json)加入config.py (NEWBING_COOKIES)
+
+## Moss
+- 使用docker-compose
+
+## RWKV
+- 使用docker-compose
+
+## LLAMA
+- 使用docker-compose
+
+## 盘古
+- 使用docker-compose
+
 
 ---
 ## Text-Generation-UI (TGUI,调试中,暂不可用)
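The Claude-Stack steps above boil down to two values that end up in config.py. As an optional sanity check before launching main.py, they can be exercised directly with slack_sdk; this snippet is not part of the project, and the names and values are placeholders:

```python
# Optional sanity check for the Slack credentials used by the stack-claude bridge.
# Assumes the slack_sdk package from the project's requirements is installed.
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError

SLACK_CLAUDE_USER_TOKEN = "xoxp-..."   # placeholder, same value as in config.py
SLACK_CLAUDE_BOT_ID = "U01ABCDEFGH"    # placeholder, same value as in config.py

client = WebClient(token=SLACK_CLAUDE_USER_TOKEN)
try:
    client.auth_test()                                    # verifies the user token
    client.conversations_open(users=SLACK_CLAUDE_BOT_ID)  # verifies the bot ID is reachable
    print("Slack token and bot ID look valid.")
except SlackApiError as e:
    print(f"Slack check failed: {e.response['error']}")
```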
    	
request_llm/bridge_all.py (CHANGED)

@@ -22,9 +22,6 @@ from .bridge_chatglm import predict as chatglm_ui
 from .bridge_newbing import predict_no_ui_long_connection as newbing_noui
 from .bridge_newbing import predict as newbing_ui
 
-from .bridge_claude import predict_no_ui_long_connection as claude_noui
-from .bridge_claude import predict as claude_ui
-
 # from .bridge_tgui import predict_no_ui_long_connection as tgui_noui
 # from .bridge_tgui import predict as tgui_ui
 
@@ -133,15 +130,7 @@ model_info = {
         "tokenizer": tokenizer_gpt35,
         "token_cnt": get_token_num_gpt35,
     },
-
-    "claude": {
-        "fn_with_ui": claude_ui,
-        "fn_without_ui": claude_noui,
-        "endpoint": None,
-        "max_token": 4096,
-        "tokenizer": tokenizer_gpt35,
-        "token_cnt": get_token_num_gpt35,
-    },
+
 }
 
 
@@ -198,8 +187,20 @@ if "moss" in AVAIL_LLM_MODELS:
             "token_cnt": get_token_num_gpt35,
         },
     })
-
-
+if "stack-claude" in AVAIL_LLM_MODELS:
+    from .bridge_stackclaude import predict_no_ui_long_connection as claude_noui
+    from .bridge_stackclaude import predict as claude_ui
+    # claude
+    model_info.update({
+        "stack-claude": {
+            "fn_with_ui": claude_ui,
+            "fn_without_ui": claude_noui,
+            "endpoint": None,
+            "max_token": 8192,
+            "tokenizer": tokenizer_gpt35,
+            "token_cnt": get_token_num_gpt35,
+        }
+    })
 
 
 def LLM_CATCH_EXCEPTION(f):
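The new block follows the same lazy-registration pattern as the moss and newbing entries: the bridge module is imported only when "stack-claude" appears in AVAIL_LLM_MODELS, and the rest of the project then reaches it through model_info rather than importing the bridge directly. A rough dispatch sketch; the argument names and llm_kwargs fields mirror the other bridges and are assumptions here, not code from this commit:

```python
# Rough dispatch sketch: how callers reach the stack-claude bridge through model_info.
# The llm_kwargs fields and keyword names are assumptions modeled on the other bridges.
from request_llm.bridge_all import model_info

model = "stack-claude"
entry = model_info[model]          # present only if "stack-claude" is in AVAIL_LLM_MODELS

llm_kwargs = {'llm_model': model, 'max_length': entry['max_token']}
reply = entry['fn_without_ui'](    # i.e. bridge_stackclaude.predict_no_ui_long_connection
    inputs="你好",
    llm_kwargs=llm_kwargs,
    history=[],
    sys_prompt="",
)
print(reply)
```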
    	
request_llm/bridge_newbing.py (CHANGED)

@@ -153,7 +153,7 @@ class NewBingHandle(Process):
                 # 进入任务等待状态
                 asyncio.run(self.async_run())
             except Exception:
-                tb_str = '```\n' + trimmed_format_exc() + '```\n'
+                tb_str = '\n```\n' + trimmed_format_exc() + '\n```\n'
                 self.child.send(f'[Local Message] Newbing失败 {tb_str}.')
                 self.child.send('[Fail]')
                 self.child.send('[Finish]')
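The single changed line only adds a newline before and after the backtick fences: because the traceback is interpolated into the f-string '[Local Message] Newbing失败 {tb_str}.', the opening fence would otherwise sit mid-line and the chat window's Markdown renderer would not treat the traceback as a code block. A small illustration, with traceback.format_exc standing in for the project's trimmed_format_exc:

```python
# Illustration of the fencing fix; traceback.format_exc() stands in for trimmed_format_exc().
import traceback

def wrap_old(tb: str) -> str:
    return '```\n' + tb + '```\n'      # opening fence can end up glued to preceding text

def wrap_new(tb: str) -> str:
    return '\n```\n' + tb + '\n```\n'  # fences always start on their own lines

try:
    1 / 0
except Exception:
    tb = traceback.format_exc()
    print('[Local Message] Newbing失败 ' + wrap_old(tb))  # fence not at line start
    print('[Local Message] Newbing失败 ' + wrap_new(tb))  # renders as a proper code block
```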
    	
request_llm/{bridge_claude.py → bridge_stackclaude.py} (RENAMED)

@@ -9,8 +9,6 @@ from toolbox import get_conf
 from slack_sdk.errors import SlackApiError
 from slack_sdk.web.async_client import AsyncWebClient
 import asyncio
-import sys
-sys.path.append('..')
 
 
 """
@@ -38,7 +36,7 @@ class SlackClient(AsyncWebClient):
     CHANNEL_ID = None
 
     async def open_channel(self):
-        response = await self.conversations_open(users=get_conf('…
+        response = await self.conversations_open(users=get_conf('SLACK_CLAUDE_BOT_ID')[0])
         self.CHANNEL_ID = response["channel"]["id"]
 
     async def chat(self, text):
@@ -53,7 +51,7 @@ class SlackClient(AsyncWebClient):
             # TODO:暂时不支持历史消息,因为在同一个频道里存在多人使用时历史消息渗透问题
             resp = await self.conversations_history(channel=self.CHANNEL_ID, oldest=self.LAST_TS, limit=1)
             msg = [msg for msg in resp["messages"]
-                   if msg.get("user") == get_conf('…
+                   if msg.get("user") == get_conf('SLACK_CLAUDE_BOT_ID')[0]]
             return msg
         except (SlackApiError, KeyError) as e:
             raise RuntimeError(f"获取Slack消息失败。")
@@ -174,8 +172,8 @@ class ClaudeHandle(Process):
                 self.proxies_https = proxies['https']
 
             try:
-                …
-                self.claude_model = SlackClient(token=…
+                SLACK_CLAUDE_USER_TOKEN, = get_conf('SLACK_CLAUDE_USER_TOKEN')
+                self.claude_model = SlackClient(token=SLACK_CLAUDE_USER_TOKEN, proxy=self.proxies_https)
                 print('Claude组件初始化成功。')
             except:
                 self.success = False
@@ -190,7 +188,7 @@ class ClaudeHandle(Process):
                 # 进入任务等待状态
                 asyncio.run(self.async_run())
             except Exception:
-                tb_str = '```\n' + trimmed_format_exc() + '```\n'
+                tb_str = '\n```\n' + trimmed_format_exc() + '\n```\n'
                 self.child.send(f'[Local Message] Claude失败 {tb_str}.')
                 self.child.send('[Fail]')
                 self.child.send('[Finish]')
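For orientation on the renamed bridge: it talks to Claude by opening a Slack DM with the bot, posting the prompt, and polling the conversation history for the bot's reply. A compressed sketch of that round trip; conversations_open, conversations_history, the get_conf unpacking, and the two config keys come from the diff, while chat_postMessage, the fixed sleep, and the function itself are illustrative stand-ins:

```python
# Compressed, illustrative sketch of the Slack round trip behind bridge_stackclaude.py.
# conversations_open / conversations_history and the config keys appear in the diff;
# chat_postMessage, the fixed sleep and this helper function are illustrative.
import asyncio
from slack_sdk.web.async_client import AsyncWebClient
from toolbox import get_conf

async def ask_claude_once(prompt: str) -> list:
    SLACK_CLAUDE_BOT_ID, = get_conf('SLACK_CLAUDE_BOT_ID')
    SLACK_CLAUDE_USER_TOKEN, = get_conf('SLACK_CLAUDE_USER_TOKEN')

    client = AsyncWebClient(token=SLACK_CLAUDE_USER_TOKEN)
    resp = await client.conversations_open(users=SLACK_CLAUDE_BOT_ID)   # DM channel with the bot
    channel_id = resp["channel"]["id"]

    sent = await client.chat_postMessage(channel=channel_id, text=prompt)
    await asyncio.sleep(5)   # crude wait; the real bridge keeps polling until Claude finishes
    history = await client.conversations_history(channel=channel_id, oldest=sent["ts"], limit=10)
    # Keep only messages written by the Claude bot, mirroring the filter in the diff.
    return [m["text"] for m in history["messages"] if m.get("user") == SLACK_CLAUDE_BOT_ID]

# asyncio.run(ask_claude_once("你好"))
```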