
from fastapi import FastAPI, Request
from . import mock_tools, schemas
from fastapi.responses import StreamingResponse
from fastapi.staticfiles import StaticFiles

from fastapi_mcp import FastApiMCP

# Application instance; all tool endpoints below are registered on it and
# exposed to MCP clients via FastApiMCP (see bottom of file).
app = FastAPI()

@app.post("/hotel/recommend", response_model=schemas.HotelRecommendResponse, operation_id="hotel_recommend")
def hotel_recommend(req: schemas.HotelRecommendRequest):
    """Return hotel recommendations by delegating to the mock tool layer."""
    result = mock_tools.recommend_hotels(req)
    return result


@app.post("/hotel/order", response_model=schemas.HotelOrderResponse, operation_id="hotel_order")
def hotel_order(req: schemas.HotelOrderRequest):
    """Submit a hotel booking; the mock tool layer produces the order response."""
    response = mock_tools.send_hotel_order(req)
    return response


@app.post("/weather/query", response_model=schemas.WeatherQueryResponse, operation_id="weather_query")
def weather_query(req: schemas.WeatherQueryRequest):
    """Look up weather information via the mock tool layer."""
    answer = mock_tools.query_weather(req)
    return answer


@app.post("/train/query", response_model=schemas.TrainTicketQueryResponse, operation_id="train_query")
def train_query(req: schemas.TrainTicketQueryRequest):
    """Search available train tickets via the mock tool layer."""
    tickets = mock_tools.query_train_tickets(req)
    return tickets


@app.post("/train/order", response_model=schemas.TrainOrderResponse, operation_id="train_order")
def train_order(req: schemas.TrainOrderRequest):
    """Place a train ticket order; delegates to the mock tool layer."""
    confirmation = mock_tools.send_train_order(req)
    return confirmation


@app.post("/sight/recommend", response_model=schemas.SightRecommendResponse, operation_id="sight_recommend")
def sight_recommend(req: schemas.SightRecommendRequest):
    """Return sightseeing recommendations from the mock tool layer."""
    suggestions = mock_tools.recommend_sights(req)
    return suggestions


@app.post("/sight/info", response_model=schemas.SightInfoResponse, operation_id="sight_info")
def sight_info(req: schemas.SightInfoRequest):
    """Fetch details for a single sight via the mock tool layer."""
    details = mock_tools.get_sight_info(req)
    return details


@app.post("/plan/make", response_model=schemas.TravelPlanResponse, operation_id="plan_make")
def plan_make(req: schemas.TravelPlanRequest):
    """Build a complete (non-streaming) travel plan via the mock tool layer."""
    plan = mock_tools.make_travel_plan(req)
    return plan


@app.post("/plan/make/stream", operation_id="plan_make_stream")
async def plan_make_stream(req: schemas.TravelPlanRequest):
    """Stream a travel plan as Server-Sent Events (one `data:` frame per chunk)."""
    async def sse_frames():
        # Wrap each plan chunk in the SSE wire format.
        stream = mock_tools.make_travel_plan_stream(req)
        async for piece in stream:
            yield f"data: {piece}\n\n"

    return StreamingResponse(sse_frames(), media_type="text/event-stream")


@app.post("/agent/chat", operation_id="agent_chat")
async def agent_chat(request: Request):
    """Answer a free-form question with the LLM agent (non-streaming).

    Reads `question` from the raw JSON body; the deepseek LLM agent is given
    the tool descriptions and the question so it can plan autonomously.
    """
    payload = await request.json()
    user_question = payload.get("question", "")
    reply = await mock_tools.llm_agent_answer(user_question)
    return {"answer": reply}


# 新增流式对话接口，兼容前端 /agent/chat/stream
@app.post("/agent/chat/stream", operation_id="agent_chat_stream")
async def agent_chat_stream(request: Request):
    data = await request.json()
    question = data.get("question", "")
    # 这里简单用 make_travel_plan_stream 作为演示（如需多轮流式可后续完善）
    # 你可以根据实际业务将 question 转为 TravelPlanRequest 或直接流式 LLM agent
    from .schemas import TravelPlanRequest
    # 简单兜底：用 question 作为 location，date 填默认
    req = TravelPlanRequest(locations=[question], dates=["2025-06-24"])
    async def event_generator():
        async for chunk in mock_tools.make_travel_plan_stream(req):
            yield chunk
    return StreamingResponse(event_generator(), media_type="text/event-stream")

# --- MCP integration ---
# Expose the FastAPI endpoints above as MCP tools (keyed by their operation_id).
mcp = FastApiMCP(app)
mcp.mount()
mcp.setup_server()

# Mount the static frontend directory at the root path.
# NOTE(review): mounted after all API routes; presumably this keeps "/" serving
# the frontend without shadowing the API paths registered above — confirm
# against FastAPI/Starlette mount-ordering behavior.
app.mount("/", StaticFiles(directory="./frontend", html=True), name="static")
