"""问答路由"""
import json
from fastapi import APIRouter, HTTPException, Depends
from fastapi.responses import StreamingResponse
from app.models.schemas import ChatRequest, ChatStreamChunk
from app.services.embedding import EmbeddingService
from app.services.retrieval import RetrievalService
from app.services.chat import ChatService
from app.core.dependencies import (
    get_embedding_service,
    get_retrieval_service,
    get_chat_service
)

# All chat endpoints are mounted under /api/chat and grouped under the "chat" tag.
router = APIRouter(prefix="/api/chat", tags=["chat"])


@router.post("/stream")
async def chat_stream(
    request: ChatRequest,
    embedding_service: EmbeddingService = Depends(get_embedding_service),
    retrieval_service: RetrievalService = Depends(get_retrieval_service),
    chat_service: ChatService = Depends(get_chat_service)
):
    """
    流式问答接口
    
    - 接收用户问题
    - 将问题转换为嵌入向量
    - 检索相似度最高的文本块
    - 使用 GPT 模型生成回答
    - 流式返回回答内容
    """
    try:
        # 生成问题的嵌入向量
        query_embedding = await embedding_service.generate_embedding(request.question)
        
        # 全局检索最相关的文本块
        top_chunks, top_similarities = await retrieval_service.retrieve_top_chunks(
            query_embedding
        )
        
        if not top_chunks:
            raise HTTPException(status_code=404, detail="未找到相关文档")
        
        # 提取文本内容
        context = [chunk["content"] for chunk in top_chunks]
        
        async def event_generator():
            """生成 Server-Sent Events 流"""
            try:
                # 发送上下文信息（最高相似度及检索到的文本块）
                context_chunk = ChatStreamChunk(
                    type="context",
                    content=top_chunks[0]["content"],  # 发送最相关的文本块内容
                    similarity=float(top_similarities[0])
                )
                yield f"data: {context_chunk.model_dump_json(exclude_none=True)}\n\n"
                
                # 流式生成回答
                async for token in chat_service.generate_answer_stream(request.question, context):
                    token_chunk = ChatStreamChunk(
                        type="token",
                        content=token
                    )
                    yield f"data: {token_chunk.model_dump_json(exclude_none=True)}\n\n"
                
                # 发送完成信号
                done_chunk = ChatStreamChunk(type="done")
                yield f"data: {done_chunk.model_dump_json(exclude_none=True)}\n\n"
            
            except Exception as e:
                error_data = {"type": "error", "content": str(e)}
                yield f"data: {json.dumps(error_data)}\n\n"
        
        return StreamingResponse(
            event_generator(),
            media_type="text/event-stream"
        )
    
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"问答处理失败: {str(e)}")

