import asyncio
import os

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

from agent.agent_state import init_state
from agent.langgraph_workflow import AgentWorkflow
from knowledge_base.knowledge_builder import KnowledgeBuilder
from models.vllm_service import VLLMServer
from retrieval.hybrid_retriever import HybridRetriever




# FastAPI application instance; routes and startup hooks are registered on it below.
app = FastAPI()

# Pre-load heavy resources (model weights, vLLM engine) before serving requests.
@app.on_event("startup")
async def startup_event():
    """FastAPI startup hook: fetch the model and warm up the vLLM engine.

    NOTE(review): `@app.on_event("startup")` is deprecated in recent
    FastAPI releases in favor of lifespan handlers — consider migrating.
    """
    # Ensure the model weights exist locally (downloads them if missing).
    # Imported lazily so the download helper is only loaded at startup.
    from models.download_model import download_model
    model_path = download_model()

    # Initialize the vLLM engine. Keep a reference on app.state so the
    # engine is not garbage-collected and request handlers can reach it
    # (the original discarded the instance immediately).
    app.state.vllm_server = VLLMServer(model_path)
    print("vLLM引擎初始化完成")

    # Safety net: (re)build the knowledge base if the vector DB is still
    # missing. `kb` is defined at module scope and is bound by the time
    # this hook runs, so the forward reference is safe at runtime.
    if not os.path.exists("knowledge_base/vector_db/chroma.sqlite3"):
        kb.build_from_web()

# Initialize shared components at import time.
kb = KnowledgeBuilder()
# Build the knowledge base only if the vector database does not exist yet.
if not os.path.exists("knowledge_base/vector_db/chroma.sqlite3"):
    kb.build_from_web()

# Hybrid retriever over the knowledge base's vector store, wrapped by the agent workflow.
retriever = HybridRetriever(kb.vector_db)
agent = AgentWorkflow(retriever)


class ChatRequest(BaseModel):
    """Request body for POST /chat."""

    # Identifier of the conversation session the message belongs to.
    session_id: str
    # The user's message text.
    message: str


class ChatResponse(BaseModel):
    """Response body for POST /chat."""

    # The agent's reply text.
    response: str


@app.post("/chat", response_model=ChatResponse)
async def chat_endpoint(request: ChatRequest):
    """Chat endpoint: run the agent workflow for one user message.

    Args:
        request: session id plus the user's message.

    Returns:
        A dict matching ChatResponse with the agent's reply.

    Raises:
        HTTPException: 500 if the agent workflow fails.
    """
    # Build the initial workflow state for this session/message.
    state = init_state(request.session_id, request.message)

    try:
        # Run the LangGraph workflow asynchronously.
        state = await agent.arun(state)
    except Exception as exc:
        # Surface workflow failures as an explicit HTTP 500 with a
        # detail message instead of an unhandled traceback.
        raise HTTPException(status_code=500, detail=str(exc)) from exc

    return {"response": state["response"]}


@app.post("/update_knowledge")
async def update_knowledge():
    """Manually trigger a knowledge-base rebuild.

    `build_from_web` is a blocking synchronous call; running it directly
    inside this `async def` would stall the whole event loop for the
    duration of the rebuild, so it is offloaded to a worker thread.
    """
    await asyncio.to_thread(kb.build_from_web)
    return {"status": "知识库已更新"}


if __name__ == "__main__":
    # Launch the API with uvicorn when executed as a script.
    import uvicorn

    bind_host, bind_port = "0.0.0.0", 8000
    uvicorn.run(app, host=bind_host, port=bind_port)