"""对话核心API路由：处理用户对话请求"""
from fastapi import APIRouter, HTTPException

from typing import Optional
from models.request import ChatRequest
from models.response import ChatHistoryResponse, ChatHistoryItem, ApiResponse  # 导入ApiResponse
from core.llm_client import get_llm_stream_response, get_llm_response
from core.stream_handler import stream_llm_response
from db.chat_record import save_chat_record, get_chat_history, get_chat_count
from db.chat_session import get_session_by_id, create_chat_session, update_session_updated_at
from config.constants import ROLE_USER, ROLE_ASSISTANT
import json
from fastapi.responses import StreamingResponse
# 创建路由实例
chat_router = APIRouter(prefix="/chat-api/chat", tags=["对话核心"])


@chat_router.post("")
async def chat(request: ChatRequest):
    """Core chat endpoint: accept a user query and return the LLM response.

    Resolves (or creates) the chat session, persists both sides of the
    exchange, and returns either a streaming response or a unified
    ApiResponse envelope depending on `request.stream`.
    """
    # Resolve the session: verify ownership when a session_id is supplied,
    # otherwise create a fresh session titled with the query's first 30 chars.
    session_id = request.session_id
    if session_id:
        if not get_session_by_id(session_id=session_id, user_id=request.user_id):
            raise HTTPException(
                status_code=403,
                detail="会话不存在或无权限访问（请检查session_id）"
            )
    else:
        new_session = create_chat_session(
            user_id=request.user_id,
            title=request.query[:30]
        )
        session_id = new_session["session_id"]

    # Persist the user's question before calling the model.
    save_chat_record(
        session_id=session_id,
        user_id=request.user_id,
        role=ROLE_USER,
        content=request.query
    )

    # Keep only the most recent `history_len` exchanges; each exchange is
    # two entries (user message + assistant message), hence the factor of 2.
    history = request.history
    if request.history_len > 0:
        history = history[-2 * request.history_len:]

    # Keyword arguments shared by both LLM call paths.
    llm_kwargs = dict(
        user_query=request.query,
        history=history,
        sys_prompt=request.sys_prompt,
        temperature=request.temperature,
        top_p=request.top_p,
        max_token=request.max_token
    )

    if request.stream:
        # Streaming path (kept available but currently unused by the frontend).
        llm_stream = await get_llm_stream_response(**llm_kwargs)
        return await stream_llm_response(
            llm_stream=llm_stream,
            session_id=session_id,
            user_id=request.user_id
        )

    # Non-streaming path: fetch the full completion in a single call.
    llm_response = await get_llm_response(**llm_kwargs)
    full_reply = llm_response.choices[0].message.content

    # Persist the assistant's reply and bump the session's updated_at.
    save_chat_record(
        session_id=session_id,
        user_id=request.user_id,
        role=ROLE_ASSISTANT,
        content=full_reply
    )
    update_session_updated_at(session_id=session_id)

    # Wrap the reply in the unified ApiResponse envelope expected by the
    # frontend interceptor (code must be 200 on success).
    return ApiResponse(
        code=200,
        data=full_reply,
        msg="对话成功"
    )

    
@chat_router.get("/history")
async def get_chat_history_api(
    session_id: str,
    user_id: str,
    limit: Optional[int] = None
) -> ApiResponse[ChatHistoryResponse]:
    """Return a session's chat history wrapped in the unified envelope.

    Raises HTTP 404 when the session does not exist or belongs to a
    different user (ownership is checked before any records are read).
    """
    # Ownership check first: never expose another user's history.
    if not get_session_by_id(session_id=session_id, user_id=user_id):
        raise HTTPException(status_code=404, detail="会话不存在或无权限访问")

    # Fetch the (optionally limited) records and the total count.
    records = get_chat_history(
        session_id=session_id,
        user_id=user_id,
        limit=limit
    )
    items = [ChatHistoryItem(**record) for record in records]
    total = get_chat_count(session_id=session_id, user_id=user_id)

    return ApiResponse(
        data=ChatHistoryResponse(
            session_id=session_id,
            history=items,
            total_count=total
        ),
        msg="历史记录获取成功"
    )
