# main.py

import json
import uuid
from typing import Optional, List, Dict, Any

import uvicorn
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from langchain_core.messages import HumanMessage, ToolMessage, AIMessage

# 导入编译好的Graph应用
from my_hitl_app.graph import app_graph

# FastAPI application instance (served by uvicorn in the __main__ guard below).
api = FastAPI(
    title="Stateful Chat Service with Human-in-the-Loop",
    description="一个统一的/chat路由，处理需要人工审批的复杂任务",
)


# Pydantic models for the request and response bodies.
class ChatRequest(BaseModel):
    # The user's message. On a follow-up approval turn this carries
    # 'approve', 'reject', or a modified instruction (see /chat docstring).
    question: str
    # Omitted on the first request (a new id is generated);
    # supplied to resume an existing conversation thread.
    thread_id: Optional[str] = None


class ChatResponse(BaseModel):
    # Thread identifier the client must echo back to continue this conversation.
    thread_id: str
    status: str  # 'finished', 'paused_for_approval'
    # Final answer text, or the Markdown approval prompt when paused.
    response: Optional[str] = None
    # NOTE(review): declared but never populated by the /chat handler in this
    # file — presumably reserved for structured tool-call info; confirm intent.
    tool_calls: Optional[List[Dict[str, Any]]] = None


@api.post("/chat", response_model=ChatResponse)
def chat(request: ChatRequest):
    """
    Handle a chat request. Starts a new conversation, or resumes an existing
    one that is paused for human approval.

    - First request: supply 'question' only (a fresh thread_id is generated).
    - Follow-up approval: supply 'thread_id' plus 'question' containing
      'approve', 'reject', or a modified instruction.

    Returns a ChatResponse whose status is 'finished' (final answer in
    `response`) or 'paused_for_approval' (Markdown approval prompt in
    `response`). Raises 404 for an unknown thread_id, 400 when a rejection
    arrives with no pending tool calls, 500 when the graph pauses in an
    unexpected state.
    """
    thread_id = request.thread_id or str(uuid.uuid4())
    config = {"configurable": {"thread_id": thread_id}}

    print(f"Chat thread id: {thread_id}")

    # Snapshot of the thread's checkpointed state (empty for brand-new threads).
    current_state = app_graph.get_state(config)

    if request.thread_id and not current_state:
        raise HTTPException(status_code=404, detail="会话未找到或已过期")

    # A non-empty `next` means the graph stopped at an interrupt point
    # and is waiting for human input.
    is_interrupted = bool(current_state and current_state.next)
    print(f"Is interrupted? {is_interrupted}")

    if is_interrupted:
        # --- Case A: conversation is paused, awaiting approve/reject ---
        print(f"--- Conversation is paused, current input: {request.question} ---")

        # strip() so inputs like " approve " are still recognized.
        if request.question.strip().lower() == "approve":
            print("--- User approved. Resuming with invoke(None, config) ---")
            # Resume execution from the checkpoint; the final state is read
            # back below instead of relying on invoke()'s return value.
            app_graph.invoke(None, config)

        else:
            print("--- User rejected/modified. Resuming with feedback ToolMessage ---")

            # Guard against an empty message list before indexing [-1]
            # (previously raised an unhandled IndexError).
            messages = current_state.values.get("messages", [])
            last_message = messages[-1] if messages else None
            if not isinstance(last_message, AIMessage) or not last_message.tool_calls:
                raise HTTPException(status_code=400, detail="No pending tool calls to reject.")

            # Answer every pending tool call with the rejection, so the model
            # receives one ToolMessage per tool_call_id it emitted.
            rejection_message = f"用户拒绝了此操作。用户反馈: '{request.question}'"
            tool_messages = [
                ToolMessage(content=rejection_message, tool_call_id=call["id"])
                for call in last_message.tool_calls
            ]

            app_graph.invoke({"messages": tool_messages}, config)

    else:
        # --- Case B: normal flow, not interrupted ---
        print("--- Starting new conversation or continuing non-interrupted flow ---")
        initial_input = {"messages": [HumanMessage(content=request.question)]}
        app_graph.invoke(initial_input, config)

    # Re-read the checkpoint once for all branches; it reflects both
    # completion and a fresh interruption.
    output_state = app_graph.get_state(config)

    # --- Build the response from the final state ---
    final_next_step = output_state.next if output_state else None
    print(f"Final output state next: {final_next_step}")

    if not final_next_step:
        # Graph ran to completion.
        messages = (output_state.values.get("messages") or []) if output_state else []
        final_answer = messages[-1].content if messages else ""
        return ChatResponse(
            thread_id=thread_id,
            status="finished",
            response=final_answer
        )
    else:
        # Graph paused again, waiting for the next approval.
        messages = output_state.values.get("messages", [])
        last_message = messages[-1] if messages else None

        if not isinstance(last_message, AIMessage) or not last_message.tool_calls:
            raise HTTPException(status_code=500,
                                detail="Graph paused, but last message is not an AIMessage with tool calls.")

        # 1. Render the pending tool calls as a Markdown list.
        md_tools = []
        for tc in last_message.tool_calls:
            name = tc["name"]
            args = tc["args"]
            # Pretty-print the args as JSON.
            args_json = json.dumps(args, ensure_ascii=False, indent=2)
            # Closing fence now matches the opening fence's indentation
            # (it previously sat at column 0 inside the list item).
            md_tools.append(f"- **{name}**\n  ```json\n{args_json}\n  ```")

        tools_md = "\n".join(md_tools)

        # 2. Assemble the assistant-style approval prompt. Built from plain
        # concatenation instead of an indented triple-quoted string, which
        # leaked 8 spaces of leading whitespace into the client-facing
        # Markdown and broke its rendering.
        response_md = (
            "我即将调用以下工具，请您审核并输入 **approve** 或 **reject**（可附带理由）：\n"
            f"{tools_md}\n"
            "> 例如：`approve` 或 `reject 因为……`\n"
        )

        return ChatResponse(
            thread_id=thread_id,
            status="paused_for_approval",
            response=response_md,
        )


if __name__ == "__main__":
    # Dev entry point: print startup hints, then serve the app with uvicorn.
    startup_notes = (
        "API服务已准备就绪。",
        "在浏览器中打开 http://127.0.0.1:8000/docs 查看API文档。",
    )
    for note in startup_notes:
        print(note)
    uvicorn.run(api, host="0.0.0.0", port=8000)
