from typing import Optional

from fastapi import APIRouter, Depends
from pydantic import BaseModel

from app.dependencies import get_llm_model
from app.models.llm import TinyLlamaModel

# Module-level router; mounted by the application (prefix, if any, is set at include time).
router = APIRouter()

class ChatRequest(BaseModel):
    """Request body for the /chat endpoint.

    Attributes:
        message: The user's chat message (required).
        system_prompt: Optional system prompt forwarded to the model;
            defaults to None when the client omits it.
    """

    message: str
    # Was `str = None`: an implicit-Optional annotation that Pydantic v2
    # rejects at class definition time. Declare the Optional explicitly.
    system_prompt: Optional[str] = None

class ChatResponse(BaseModel):
    """Response body for the /chat endpoint.

    Attributes:
        response: The model-generated reply text.
    """

    response: str

@router.post("/chat", response_model=ChatResponse)
async def chat(
    request: ChatRequest,
    model: TinyLlamaModel = Depends(get_llm_model)
):
    """Chat endpoint: forward the user's message to the injected LLM.

    Wraps the incoming message in a single-turn conversation, passes the
    optional system prompt through unchanged, and returns the model's
    reply wrapped in a ChatResponse.
    """
    # Single-turn conversation: just the user's message.
    conversation = [{"role": "user", "content": request.message}]
    # NOTE(review): generate_chat_response appears to be a synchronous call
    # inside an async handler — confirm it doesn't block the event loop.
    reply = model.generate_chat_response(
        conversation, system_prompt=request.system_prompt
    )
    return ChatResponse(response=reply)