test24/api/utils.py
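"""Utilities for the chat proxy API: build OpenAI-compatible completion payloads,
verify the APP_SECRET bearer token, and forward chat requests to the upstream
provider in both streaming and non-streaming modes."""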
from datetime import datetime
import json
import uuid
from typing import Any, Dict, Optional
import httpx
from fastapi import Depends, HTTPException
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from api import validate
from api.config import (
APP_SECRET,
BASE_URL,
MODEL_MAPPING,
AGENT_MODE,
TRENDING_AGENT_MODE,
MODEL_PREFIXES,
headers,
)
from api.models import ChatRequest
from api.logger import setup_logger
logger = setup_logger(__name__)
security = HTTPBearer()
def create_chat_completion_data(
content: str, model: str, timestamp: int, finish_reason: Optional[str] = None
) -> Dict[str, Any]:
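    """Build an OpenAI-style ``chat.completion.chunk`` payload for a single streamed delta."""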
return {
"id": f"chatcmpl-{uuid.uuid4()}",
"object": "chat.completion.chunk",
"created": timestamp,
"model": model,
"choices": [
{
"index": 0,
"delta": {"content": content, "role": "assistant"},
"finish_reason": finish_reason,
}
],
"usage": None,
}
def verify_app_secret(credentials: HTTPAuthorizationCredentials = Depends(security)):
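    """Validate the bearer token against APP_SECRET; raise 403 if it does not match."""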
if credentials.credentials != APP_SECRET:
raise HTTPException(status_code=403, detail="Invalid APP_SECRET")
return credentials.credentials
def message_to_dict(message, model_prefix: Optional[str] = None):
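    """Convert a chat message into the upstream payload format, applying the optional
    model prefix and splitting out inline base64 image data when present."""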
content = message.content
if isinstance(content, list) and len(content) == 2 and "image_url" in content[1]:
# Handle messages with image data
content_text = content[0]["text"]
if model_prefix:
content_text = f"{model_prefix} {content_text}"
return {
"role": message.role,
"content": content_text,
"data": {
"imageBase64": content[1]["image_url"]["url"],
"fileText": "",
"title": "snapshot",
},
}
else:
if model_prefix:
content = f"{model_prefix} {content}"
return {"role": message.role, "content": content}
async def process_streaming_response(request: ChatRequest):
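    """Proxy a chat request upstream and re-emit each response line as an OpenAI-style SSE chunk."""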
agent_mode = AGENT_MODE.get(request.model, {})
trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
model_prefix = MODEL_PREFIXES.get(request.model, "")
hid = validate.getHid()
logger.info(f"Using hid: {hid} for model: {request.model}")
user_selected_model = MODEL_MAPPING.get(request.model, request.model)
logger.info(f"User selected model: {user_selected_model}")
json_data = {
"messages": [message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages],
"previewToken": None,
"userId": None,
"codeModelMode": True,
"agentMode": agent_mode,
"trendingAgentMode": trending_agent_mode,
"isMicMode": False,
"userSystemPrompt": None,
"maxTokens": request.max_tokens,
"playgroundTopP": request.top_p,
"playgroundTemperature": request.temperature,
"isChromeExt": False,
"githubToken": None,
"clickedAnswer2": False,
"clickedAnswer3": False,
"clickedForceWebSearch": False,
"visitFromDelta": False,
"mobileClient": False,
"userSelectedModel": user_selected_model,
"validated": hid
}
async with httpx.AsyncClient() as client:
try:
async with client.stream(
"POST",
f"{BASE_URL}/api/chat",
headers=headers,
json=json_data,
timeout=100,
) as response:
response.raise_for_status()
async for line in response.aiter_lines():
timestamp = int(datetime.now().timestamp())
if line:
content = line
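                        # The upstream returns its homepage URL when the hid token is no longer valid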
if "https://www.blackbox.ai" in content:
logger.warning("Invalid hid detected, refreshing hid")
hid = validate.getHid()
content = "hid has been refreshed, please retry"
logger.info(f"hid refreshed due to content: {content}")
yield f"data: {json.dumps(create_chat_completion_data(content, request.model, timestamp))}\n\n"
break
                        if content.startswith("$@$v=undefined-rv1$@$"):
                            # Strip the upstream metadata prefix before forwarding the chunk
                            content = content[len("$@$v=undefined-rv1$@$"):]
yield f"data: {json.dumps(create_chat_completion_data(content, request.model, timestamp))}\n\n"
yield f"data: {json.dumps(create_chat_completion_data('', request.model, timestamp, 'stop'))}\n\n"
yield "data: [DONE]\n\n"
except httpx.HTTPStatusError as e:
logger.error(f"HTTP error occurred: {e}")
raise HTTPException(status_code=e.response.status_code, detail=str(e))
except httpx.RequestError as e:
logger.error(f"Error occurred during request: {e}")
raise HTTPException(status_code=500, detail=str(e))
async def process_non_streaming_response(request: ChatRequest):
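    """Proxy a chat request upstream, collect the full response, and return it as a single OpenAI-style completion."""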
agent_mode = AGENT_MODE.get(request.model, {})
trending_agent_mode = TRENDING_AGENT_MODE.get(request.model, {})
model_prefix = MODEL_PREFIXES.get(request.model, "")
hid = validate.getHid()
logger.info(f"Using hid: {hid} for model: {request.model}")
user_selected_model = MODEL_MAPPING.get(request.model, request.model)
logger.info(f"User selected model: {user_selected_model}")
json_data = {
"messages": [message_to_dict(msg, model_prefix=model_prefix) for msg in request.messages],
"previewToken": None,
"userId": None,
"codeModelMode": True,
"agentMode": agent_mode,
"trendingAgentMode": trending_agent_mode,
"isMicMode": False,
"userSystemPrompt": None,
"maxTokens": request.max_tokens,
"playgroundTopP": request.top_p,
"playgroundTemperature": request.temperature,
"isChromeExt": False,
"githubToken": None,
"clickedAnswer2": False,
"clickedAnswer3": False,
"clickedForceWebSearch": False,
"visitFromDelta": False,
"mobileClient": False,
"userSelectedModel": user_selected_model,
"validated": hid
}
full_response = ""
async with httpx.AsyncClient() as client:
try:
            async with client.stream(
                method="POST", url=f"{BASE_URL}/api/chat", headers=headers,
                json=json_data, timeout=100,  # align with the streaming request's timeout
            ) as response:
response.raise_for_status()
async for chunk in response.aiter_text():
full_response += chunk
except httpx.HTTPStatusError as e:
logger.error(f"HTTP error occurred: {e}")
raise HTTPException(status_code=e.response.status_code, detail=str(e))
except httpx.RequestError as e:
logger.error(f"Error occurred during request: {e}")
raise HTTPException(status_code=500, detail=str(e))
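    # The upstream returns its homepage URL when the hid token is no longer valid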
if "https://www.blackbox.ai" in full_response:
logger.warning("Invalid hid detected in response, refreshing hid")
hid = validate.getHid()
full_response = "hid has been refreshed, please retry"
logger.info("hid refreshed due to response content")
    if full_response.startswith("$@$v=undefined-rv1$@$"):
        # Strip the upstream metadata prefix before building the response
        full_response = full_response[len("$@$v=undefined-rv1$@$"):]
return {
"id": f"chatcmpl-{uuid.uuid4()}",
"object": "chat.completion",
"created": int(datetime.now().timestamp()),
"model": request.model,
"choices": [
{
"index": 0,
"message": {"role": "assistant", "content": full_response},
"finish_reason": "stop",
}
],
"usage": None,
}