Rfym21 committed
Commit 0cac273 · verified · Parent: 30c8c0b

Upload 12 files

Files changed (12)
  1. .env +1 -0
  2. Dockerfile +22 -11
  3. api/__init__.py +0 -0
  4. api/app.py +34 -0
  5. api/auth.py +10 -0
  6. api/config.py +36 -0
  7. api/logger.py +20 -0
  8. api/models.py +16 -0
  9. api/routes.py +62 -0
  10. api/utils.py +158 -0
  11. main.py +5 -0
  12. requirements.txt +7 -0
.env ADDED
@@ -0,0 +1 @@
+ APP_SECRET=123456
Dockerfile CHANGED
@@ -1,12 +1,23 @@
- # Use the specified base image
- FROM snailyc/blackbox2api
-
- # Set environment variables
- ENV PORT=8001 \
-     APP_SECRET=Rfym21
-
- # Expose port 8001
- EXPOSE 8001
-
- # Startup command
+ # Use the official Python base image
+ FROM python:3.9-slim
+
+ # Set the working directory
+ WORKDIR /app
+
+ # Copy the dependency file into the working directory
+ COPY requirements.txt .
+
+ # Install dependencies
+ RUN pip install --no-cache-dir -r requirements.txt
+
+ # Copy the application code into the container
+ COPY . .
+
+ # Expose the port (adjust to the port your application listens on)
+ EXPOSE 8001
+
+ # Set environment variables (example)
+ ENV APP_SECRET=Rfym21
+
+ # Command to run the application
  CMD ["python", "main.py"]
api/__init__.py ADDED
File without changes
api/app.py ADDED
@@ -0,0 +1,34 @@
+ from fastapi import FastAPI, Request
+ from starlette.middleware.cors import CORSMiddleware
+ from fastapi.responses import JSONResponse
+ from api.logger import setup_logger
+ from api.routes import router  # import the router rather than individual functions
+
+ logger = setup_logger(__name__)
+
+ def create_app():
+     app = FastAPI()
+
+     # Configure CORS
+     app.add_middleware(
+         CORSMiddleware,
+         allow_origins=["*"],
+         allow_credentials=True,
+         allow_methods=["*"],
+         allow_headers=["*"],
+     )
+
+     # Register routes
+     app.include_router(router)
+
+     @app.exception_handler(Exception)
+     async def global_exception_handler(request: Request, exc: Exception):
+         logger.error(f"An error occurred: {str(exc)}")
+         return JSONResponse(
+             status_code=500,
+             content={"message": "An internal server error occurred."},
+         )
+
+     return app
+
+ app = create_app()
api/auth.py ADDED
@@ -0,0 +1,10 @@
+ from fastapi import Depends, HTTPException
+ from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
+ from api.config import APP_SECRET
+
+ security = HTTPBearer()
+
+ def verify_app_secret(credentials: HTTPAuthorizationCredentials = Depends(security)):
+     if credentials.credentials != APP_SECRET:
+         raise HTTPException(status_code=403, detail="Invalid APP_SECRET")
+     return credentials.credentials
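
A minimal sketch of how this dependency behaves, using FastAPI's TestClient; the request bodies and outcomes below are illustrative and not part of the upload:

    from fastapi.testclient import TestClient
    from api.app import app

    client = TestClient(app)
    body = {"model": "gpt-4o", "messages": [{"role": "user", "content": "hi"}]}

    # No Authorization header: HTTPBearer rejects the request before the handler runs.
    print(client.post("/v1/chat/completions", json=body).status_code)  # 403 expected

    # Wrong secret: verify_app_secret raises 403 "Invalid APP_SECRET".
    print(client.post("/v1/chat/completions", json=body,
                      headers={"Authorization": "Bearer wrong"}).status_code)  # 403 expected
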
api/config.py ADDED
@@ -0,0 +1,36 @@
+ import os
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+ BASE_URL = "https://www.blackbox.ai"
+ headers = {
+     'accept': '*/*',
+     'accept-language': 'zh-CN,zh;q=0.9',
+     'origin': 'https://www.blackbox.ai',
+     'priority': 'u=1, i',
+     'sec-ch-ua': '"Google Chrome";v="129", "Not=A?Brand";v="8", "Chromium";v="129"',
+     'sec-ch-ua-mobile': '?0',
+     'sec-ch-ua-platform': '"Windows"',
+     'sec-fetch-dest': 'empty',
+     'sec-fetch-mode': 'cors',
+     'sec-fetch-site': 'same-origin',
+     'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36',
+ }
+ APP_SECRET = os.getenv("APP_SECRET")
+ ALLOWED_MODELS = [
+     {"id": "gpt-4o", "name": "gpt-4o"},
+     {"id": "gemini-1.5-pro-latest", "name": "gemini-pro"},
+     {"id": "gemini-1.5-pro", "name": "gemini-pro"},
+     {"id": "gemini-pro", "name": "gemini-pro"},
+     {"id": "claude-3-5-sonnet-20240620", "name": "claude-sonnet-3.5"},
+     {"id": "claude-3-5-sonnet", "name": "claude-sonnet-3.5"},
+ ]
+ MODEL_MAPPING = {
+     "gpt-4o": "gpt-4o",
+     "gemini-1.5-pro-latest": "gemini-pro",
+     "gemini-1.5-pro": "gemini-1.5-pro",
+     "gemini-pro": "gemini-pro",
+     "claude-3-5-sonnet-20240620": "claude-sonnet-3.5",
+     "claude-3-5-sonnet": "claude-sonnet-3.5",
+ }
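
A small sketch of how these two tables are read together: ALLOWED_MODELS is what /v1/models advertises, and MODEL_MAPPING translates the OpenAI-style id in the request into the name forwarded to blackbox.ai (an illustration only, not part of the upload):

    from api.config import ALLOWED_MODELS, MODEL_MAPPING

    requested = "claude-3-5-sonnet-20240620"            # id as sent by an OpenAI-style client
    assert requested in {m["id"] for m in ALLOWED_MODELS}
    upstream = MODEL_MAPPING.get(requested)              # name forwarded to blackbox.ai
    print(upstream)                                      # claude-sonnet-3.5
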
api/logger.py ADDED
@@ -0,0 +1,20 @@
+ import logging
+
+ def setup_logger(name):
+     logger = logging.getLogger(name)
+     if not logger.handlers:
+         logger.setLevel(logging.INFO)
+         formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+
+         # Console handler
+         console_handler = logging.StreamHandler()
+         console_handler.setFormatter(formatter)
+         logger.addHandler(console_handler)
+
+         # File handler - error level
+         # error_file_handler = logging.FileHandler('error.log')
+         # error_file_handler.setFormatter(formatter)
+         # error_file_handler.setLevel(logging.ERROR)
+         # logger.addHandler(error_file_handler)
+
+     return logger
api/models.py ADDED
@@ -0,0 +1,16 @@
+ from typing import List, Optional, Union
+ from pydantic import BaseModel
+
+
+ class Message(BaseModel):
+     role: str
+     content: Union[str, list]  # the `str | list` syntax needs Python 3.10+; Union keeps the 3.9 base image working
+
+
+ class ChatRequest(BaseModel):
+     model: str
+     messages: List[Message]
+     stream: Optional[bool] = False
+     temperature: Optional[float] = 0.7
+     top_p: Optional[float] = 0.9
+     max_tokens: Optional[int] = 8192
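
A quick illustration of how an incoming OpenAI-style JSON body maps onto these models; the payload values are made up for the example:

    from api.models import ChatRequest

    payload = {
        "model": "gpt-4o",
        "messages": [{"role": "user", "content": "Hello"}],
        "stream": True,
    }
    req = ChatRequest(**payload)   # pydantic fills temperature/top_p/max_tokens defaults
    print(req.max_tokens)          # 8192
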
api/routes.py ADDED
@@ -0,0 +1,62 @@
+ import json
+ from fastapi import APIRouter, Depends, HTTPException, Request, Response
+ from fastapi.responses import StreamingResponse
+ from api.auth import verify_app_secret
+ from api.config import ALLOWED_MODELS
+ from api.models import ChatRequest
+ from api.utils import process_non_streaming_response, process_streaming_response
+ from api.logger import setup_logger
+
+ logger = setup_logger(__name__)
+
+ router = APIRouter()
+
+ @router.options("/v1/chat/completions")
+ @router.options("/api/v1/chat/completions")
+ async def chat_completions_options():
+     return Response(
+         status_code=200,
+         headers={
+             "Access-Control-Allow-Origin": "*",
+             "Access-Control-Allow-Methods": "POST, OPTIONS",
+             "Access-Control-Allow-Headers": "Content-Type, Authorization",
+         },
+     )
+
+ @router.get("/v1/models")
+ @router.get("/api/v1/models")
+ async def list_models():
+     return {"object": "list", "data": ALLOWED_MODELS}
+
+ @router.post("/v1/chat/completions")
+ @router.post("/api/v1/chat/completions")
+ async def chat_completions(
+     request: ChatRequest, app_secret: str = Depends(verify_app_secret)
+ ):
+     logger.info("Entering chat_completions route")
+     logger.info(f"Received request: {request}")
+     logger.info(f"App secret: {app_secret}")
+     logger.info(f"Received chat completion request for model: {request.model}")
+
+     if request.model not in [model["id"] for model in ALLOWED_MODELS]:
+         raise HTTPException(
+             status_code=400,
+             detail=f"Model {request.model} is not allowed. Allowed models are: {', '.join(model['id'] for model in ALLOWED_MODELS)}",
+         )
+
+     if request.stream:
+         logger.info("Streaming response")
+         return StreamingResponse(process_streaming_response(request), media_type="text/event-stream")
+     else:
+         logger.info("Non-streaming response")
+         return await process_non_streaming_response(request)
+
+
+ @router.get("/")
+ @router.get("/healthz")
+ @router.get("/ready")
+ @router.get("/alive")
+ @router.get("/status")
+ @router.get("/health")
+ def health_check(request: Request):
+     return Response(content=json.dumps({"status": "ok"}), media_type="application/json")
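
A minimal client sketch against these routes, assuming the server from main.py is running on localhost:8001 and APP_SECRET is the value from .env; the host and secret here are illustrative:

    import httpx

    BASE = "http://localhost:8001"                 # illustrative; use your own deployment URL
    HEADERS = {"Authorization": "Bearer 123456"}   # must match APP_SECRET

    # List the advertised models
    print(httpx.get(f"{BASE}/v1/models").json())

    # Non-streaming chat completion
    body = {"model": "gpt-4o", "messages": [{"role": "user", "content": "Say hi"}], "stream": False}
    r = httpx.post(f"{BASE}/v1/chat/completions", headers=HEADERS, json=body, timeout=120)
    print(r.json()["choices"][0]["message"]["content"])
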
api/utils.py ADDED
@@ -0,0 +1,158 @@
+ from datetime import datetime
+ from fastapi import Depends, HTTPException
+ import json
+ from typing import Any, Dict, Optional
+ import uuid
+
+ import httpx
+ from api.config import MODEL_MAPPING, headers
+ from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
+ security = HTTPBearer()  # needed by the local verify_app_secret below
+
+ from api.config import APP_SECRET, BASE_URL
+ from api.models import ChatRequest
+
+ from api.logger import setup_logger
+
+ logger = setup_logger(__name__)
+
+
+ def create_chat_completion_data(
+     content: str, model: str, timestamp: int, finish_reason: Optional[str] = None
+ ) -> Dict[str, Any]:
+     return {
+         "id": f"chatcmpl-{uuid.uuid4()}",
+         "object": "chat.completion.chunk",
+         "created": timestamp,
+         "model": model,
+         "choices": [
+             {
+                 "index": 0,
+                 "delta": {"content": content, "role": "assistant"},
+                 "finish_reason": finish_reason,
+             }
+         ],
+         "usage": None,
+     }
+
+
+ def verify_app_secret(credentials: HTTPAuthorizationCredentials = Depends(security)):
+     if credentials.credentials != APP_SECRET:
+         raise HTTPException(status_code=403, detail="Invalid APP_SECRET")
+     return credentials.credentials
+
+
+ def message_to_dict(message):
+     if isinstance(message.content, str):
+         return {"role": message.role, "content": message.content}
+     elif isinstance(message.content, list) and len(message.content) == 2:
+         return {
+             "role": message.role,
+             "content": message.content[0]["text"],
+             "data": {
+                 "imageBase64": message.content[1]["image_url"]["url"],
+                 "fileText": "",
+                 "title": "snapshoot",
+             },
+         }
+     else:
+         return {"role": message.role, "content": message.content}
+
+
+ async def process_streaming_response(request: ChatRequest):
+     json_data = {
+         "messages": [message_to_dict(msg) for msg in request.messages],
+         "previewToken": None,
+         "userId": None,
+         "codeModelMode": True,
+         "agentMode": {},
+         "trendingAgentMode": {},
+         "isMicMode": False,
+         "userSystemPrompt": None,
+         "maxTokens": request.max_tokens,
+         "playgroundTopP": request.top_p,
+         "playgroundTemperature": request.temperature,
+         "isChromeExt": False,
+         "githubToken": None,
+         "clickedAnswer2": False,
+         "clickedAnswer3": False,
+         "clickedForceWebSearch": False,
+         "visitFromDelta": False,
+         "mobileClient": False,
+         "userSelectedModel": MODEL_MAPPING.get(request.model),
+     }
+
+     async with httpx.AsyncClient() as client:
+         try:
+             async with client.stream(
+                 "POST",
+                 f"{BASE_URL}/api/chat",
+                 headers=headers,
+                 json=json_data,
+                 timeout=100,
+             ) as response:
+                 response.raise_for_status()
+                 async for line in response.aiter_lines():
+                     timestamp = int(datetime.now().timestamp())
+                     if line:
+                         content = line + "\n"
+                         if content.startswith("$@$v=undefined-rv1$@$"):
+                             yield f"data: {json.dumps(create_chat_completion_data(content[21:], request.model, timestamp))}\n\n"
+                         else:
+                             yield f"data: {json.dumps(create_chat_completion_data(content, request.model, timestamp))}\n\n"
+
+                 yield f"data: {json.dumps(create_chat_completion_data('', request.model, timestamp, 'stop'))}\n\n"
+                 yield "data: [DONE]\n\n"
+         except httpx.HTTPStatusError as e:
+             logger.error(f"HTTP error occurred: {e}")
+             raise HTTPException(status_code=e.response.status_code, detail=str(e))
+         except httpx.RequestError as e:
+             logger.error(f"Error occurred during request: {e}")
+             raise HTTPException(status_code=500, detail=str(e))
+
+
+ async def process_non_streaming_response(request: ChatRequest):
+     json_data = {
+         "messages": [message_to_dict(msg) for msg in request.messages],
+         "previewToken": None,
+         "userId": None,
+         "codeModelMode": True,
+         "agentMode": {},
+         "trendingAgentMode": {},
+         "isMicMode": False,
+         "userSystemPrompt": None,
+         "maxTokens": request.max_tokens,
+         "playgroundTopP": request.top_p,
+         "playgroundTemperature": request.temperature,
+         "isChromeExt": False,
+         "githubToken": None,
+         "clickedAnswer2": False,
+         "clickedAnswer3": False,
+         "clickedForceWebSearch": False,
+         "visitFromDelta": False,
+         "mobileClient": False,
+         "userSelectedModel": MODEL_MAPPING.get(request.model),
+     }
+     full_response = ""
+     async with httpx.AsyncClient() as client:
+         async with client.stream(
+             method="POST", url=f"{BASE_URL}/api/chat", headers=headers, json=json_data
+         ) as response:
+             async for chunk in response.aiter_text():
+                 full_response += chunk
+     if full_response.startswith("$@$v=undefined-rv1$@$"):
+         full_response = full_response[21:]
+     return {
+         "id": f"chatcmpl-{uuid.uuid4()}",
+         "object": "chat.completion",
+         "created": int(datetime.now().timestamp()),
+         "model": request.model,
+         "choices": [
+             {
+                 "index": 0,
+                 "message": {"role": "assistant", "content": full_response},
+                 "finish_reason": "stop",
+             }
+         ],
+         "usage": None,
+     }
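
For reference, a small sketch of how a client would consume the event stream produced by process_streaming_response; the URL, secret, and prompt are illustrative:

    import json
    import httpx

    headers = {"Authorization": "Bearer 123456"}  # illustrative; must match APP_SECRET
    body = {"model": "gpt-4o", "messages": [{"role": "user", "content": "Stream a reply"}], "stream": True}

    with httpx.stream("POST", "http://localhost:8001/v1/chat/completions",
                      headers=headers, json=body, timeout=120) as r:
        for line in r.iter_lines():
            # Skip blank keep-alive lines and the terminal [DONE] marker.
            if not line.startswith("data: ") or line == "data: [DONE]":
                continue
            chunk = json.loads(line[len("data: "):])
            print(chunk["choices"][0]["delta"]["content"], end="")
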
main.py ADDED
@@ -0,0 +1,5 @@
+ import uvicorn
+ from api.app import app
+
+ if __name__ == "__main__":
+     uvicorn.run(app, host="0.0.0.0", port=8001)
requirements.txt ADDED
@@ -0,0 +1,7 @@
+ fastapi
+ httpx
+ pydantic
+ pyinstaller
+ python-dotenv
+ starlette
+ uvicorn