# NOTE: the original paste began with hosting-UI status text ("Spaces: Paused"),
# which is not part of this module.
# Standard library
import os
import uuid
from datetime import datetime
from typing import Optional

# Third-party
import requests
from dotenv import load_dotenv
from fastapi import FastAPI, HTTPException, Depends, Security
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from fastapi.security import APIKeyHeader
from pydantic import BaseModel
# Load environment variables from a local .env file (no-op if the file is absent).
load_dotenv()

# Initialize FastAPI app
app = FastAPI(
    title="MultiChatAI to OpenAI API Wrapper",
    description="API wrapper for MultiChatAI with OpenAI-compatible endpoints",
    version="1.0.0",
    docs_url="/docs",
    redoc_url=None,
)

# Configuration
API_KEY_NAME = "X-API-KEY"
# Comma-separated list from .env. Strip whitespace and drop empty entries:
# the original `os.getenv("API_KEYS", "").split(",")` produced [""] for a
# missing/blank variable and kept padding around keys ("k1, k2" never matched).
API_KEYS = [k.strip() for k in os.getenv("API_KEYS", "").split(",") if k.strip()]

# Configure CORS
# NOTE(review): browsers reject `Access-Control-Allow-Origin: *` combined with
# credentials per the CORS spec -- restrict allow_origins for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Security setup: clients authenticate via the X-API-KEY request header.
# auto_error=False so get_api_key() can return its own 401 messages.
api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False)
async def get_api_key(api_key: str = Security(api_key_header)):
    """Dependency that validates the X-API-KEY header.

    Returns the key when it is present and configured in API_KEYS.

    Raises:
        HTTPException: 401 with "API key is missing" when the header is
            absent/empty, or "Invalid API key" when it is unknown.
    """
    if api_key and api_key in API_KEYS:
        return api_key
    detail = "Invalid API key" if api_key else "API key is missing"
    raise HTTPException(status_code=401, detail=detail)
| # Request models | |
class ChatMessage(BaseModel):
    """One chat message in OpenAI wire format."""
    # role is not validated here; presumably "system" / "user" / "assistant"
    # -- confirm against callers.
    role: str
    content: str
class ChatCompletionRequest(BaseModel):
    """Request body for the OpenAI-style chat-completions endpoint."""
    model: str = "deepseek-ai/DeepSeek-R1"
    messages: list[ChatMessage]
    temperature: float = 0.7
    # Was `max_tokens: int = None`: None is not an int, so pydantic v2 rejects
    # that model definition. Optional[int] accepts both an int and None.
    max_tokens: Optional[int] = None
# Health check endpoint
# NOTE(review): no route decorator was present in the original source, leaving
# this handler unregistered; /health is the conventional path -- confirm.
@app.get("/health")
async def health_check():
    """Lightweight liveness probe.

    Returns:
        dict with status, service name, current local-time ISO 8601 timestamp,
        and the ENVIRONMENT variable (default "development").
    """
    return {
        "status": "OK",
        "service": "MultiChatAI Proxy",
        "timestamp": datetime.now().isoformat(),
        "environment": os.getenv("ENVIRONMENT", "development")
    }
# Main API endpoint
# NOTE(review): no route decorator was present in the original source, leaving
# this handler unregistered; the path below is the OpenAI-compatible convention
# the file's description promises -- confirm before deploy.
@app.post("/v1/chat/completions")
async def chat_completion(
    request: ChatCompletionRequest,
    api_key: str = Depends(get_api_key)
):
    """Proxy an OpenAI-style chat completion request to MultiChatAI.

    Args:
        request: OpenAI-style payload (model, messages, temperature, ...).
            NOTE: `max_tokens` is accepted but not forwarded upstream -- the
            upstream schema for it is not visible here.
        api_key: validated client key, injected by get_api_key.

    Returns:
        JSONResponse shaped like an OpenAI chat.completion object. The
        upstream reply body is used verbatim as the assistant message.

    Raises:
        HTTPException: 504 on upstream timeout, 502 on other upstream
            errors, 500 on any unexpected failure.
    """
    try:
        # Prepare request for MultiChatAI
        multi_chat_body = {
            "chatSettings": {
                "model": request.model,
                "prompt": "You are a helpful AI assistant.",
                "temperature": request.temperature,
                "contextLength": 32000,
                "includeProfileContext": True,
                "includeWorkspaceInstructions": True,
                "embeddingsProvider": "openai"
            },
            "messages": [
                # A dated system message is prepended; client messages follow as-is.
                {"role": "system", "content": f"Today is {datetime.now().strftime('%m/%d/%Y')}.\nYou are a helpful AI assistant."},
                *[{"role": msg.role, "content": msg.content} for msg in request.messages]
            ],
            "customModelId": ""
        }
        # Call MultiChatAI API
        response = requests.post(
            "https://www.multichatai.com/api/chat/deepinfra",
            headers={"Content-Type": "application/json"},
            json=multi_chat_body,
            timeout=30
        )
        response.raise_for_status()
        # Wrap the plain-text upstream reply in an OpenAI-compatible envelope.
        return JSONResponse({
            "id": f"chatcmpl-{uuid.uuid4()}",
            "object": "chat.completion",
            "created": int(datetime.now().timestamp()),
            "model": request.model,
            "choices": [{
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": response.text.strip()
                },
                "finish_reason": "stop"
            }],
            "usage": {
                # Upstream does not report token counts, so these are zeroed.
                "prompt_tokens": 0,
                "completion_tokens": 0,
                "total_tokens": 0
            }
        })
    except requests.Timeout:
        raise HTTPException(status_code=504, detail="Upstream service timeout")
    except requests.RequestException as e:
        raise HTTPException(
            status_code=502,
            detail=f"Upstream service error: {str(e)}"
        )
    except Exception as e:
        # Catch-all boundary; str(e) kept for parity with original behavior,
        # though it may leak internals -- consider a generic message in prod.
        raise HTTPException(status_code=500, detail=str(e))
# Add this if you need to support OPTIONS requests
# NOTE(review): this handler is not registered to any route in the visible
# source (no @app.options decorator), and CORSMiddleware above already answers
# CORS preflight OPTIONS requests -- likely dead code; confirm before removing.
async def options_handler():
    # Empty JSON body with 200 OK.
    return JSONResponse(content={}, status_code=200)
# For production deployment
def get_application():
    """Application factory: hand the module-level app to an ASGI server."""
    return app
# For running locally
if __name__ == "__main__":
    import uvicorn

    # PORT and RELOAD are read from the environment (defaults: 7860, off).
    listen_port = int(os.getenv("PORT", "7860"))
    want_reload = os.getenv("RELOAD", "false").lower() == "true"
    uvicorn.run("app:app", host="0.0.0.0", port=listen_port, reload=want_reload)