from fastapi import APIRouter
from fastapi.responses import StreamingResponse
import requests
from model.chat.chat_message import RobotChatMessage, LLMChatMessage

# Router for LLM chat endpoints (tag label is user-facing, kept verbatim).
llm_router = APIRouter(prefix="/chat", tags=["大模型对话"])

# Upstream DeepSeek OpenAI-compatible chat-completions endpoint.
API_URL = "https://api.deepseek.com/v1/chat/completions"
# SECURITY: API key is hardcoded and committed to source control — move it to
# an environment variable / secrets manager and rotate this key.
API_KEY = "sk-8ee893ebae0c46e4bbb4da914bb26989"
# Default model used when the incoming request does not specify one.
MODEL = "deepseek-chat"

@llm_router.post("/completions")
def chat_completions(message: LLMChatMessage):
    """Proxy a chat-completion request to the DeepSeek API, streaming back SSE.

    Args:
        message: Chat payload forwarded upstream; ``message.model`` falls back
            to the module default ``MODEL`` when empty.

    Returns:
        StreamingResponse emitting ``data: <json>\\n\\n`` SSE events and a
        final ``data: [DONE]`` terminator, mirroring the upstream stream.

    Raises:
        requests.HTTPError: surfaced from the generator if the upstream
            responds with a non-2xx status.
    """
    headers = {
        'Authorization': f'Bearer {API_KEY}',
        'Content-Type': 'application/json'
    }

    # Fall back to the module default when the caller did not pick a model.
    if not message.model:
        message.model = MODEL

    def stream():
        # stream=True keeps the upstream connection open for incremental reads.
        # The (connect, read) timeout pair prevents a stalled upstream from
        # hanging this worker forever (the original call had no timeout).
        with requests.post(
            API_URL,
            json=message.model_dump(),
            headers=headers,
            stream=True,
            timeout=(10, 300),
        ) as response:
            response.raise_for_status()
            for raw in response.iter_lines():
                if not raw:
                    # SSE keep-alive / event-separator blank lines.
                    continue
                # BUG FIX: the original used lstrip('data: '), which strips the
                # *character set* {d, a, t, ' ', ':'} rather than the literal
                # prefix and could corrupt payloads starting with those chars.
                line_str = raw.decode('utf-8').removeprefix('data: ')
                if line_str == '[DONE]':
                    yield 'data: [DONE]\n\n'
                    break
                yield f'data: {line_str}\n\n'

    return StreamingResponse(
        stream(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",  # disable nginx buffering so events flush immediately
        },
    )

