import asyncio, json
from typing import TypedDict
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
from langgraph.constants import START, END
from langgraph.graph import StateGraph

class Mystate(TypedDict):
    """Shared graph state passed between LangGraph nodes."""

    # Message written by whichever node ran last (see step1/step2 below).
    aaa: str

# Single process-wide message queue bridging the graph nodes and the SSE endpoint.
# NOTE(review): because there is exactly one global queue, concurrent /stream
# clients would steal/interleave each other's messages — acceptable for a
# single-client demo, but confirm before production use.
msg_queue: asyncio.Queue = asyncio.Queue()

# --- Graph node functions ---
async def step1(state: Mystate):
    """First node: emit five progress ticks, then record its visit in the state.

    Publishes one ``progress`` message per loop iteration and a final
    ``graph`` message carrying a snapshot of the updated state.
    """
    for tick in range(5):
        await msg_queue.put({"type": "progress", "loop": tick})
        await asyncio.sleep(0.5)
    state["aaa"] = "我来了 step1"
    # Publish a copy so later mutations don't alter the queued snapshot.
    snapshot = dict(state)
    await msg_queue.put({"type": "graph", "node": "step1", "state": snapshot})
    return state

async def step2(state: Mystate):
    """Second node: pause briefly, record its visit, and publish the state."""
    await asyncio.sleep(0.5)
    state["aaa"] = "我来了 step2"
    # Queue a snapshot copy rather than the live (mutable) state dict.
    snapshot = dict(state)
    await msg_queue.put({"type": "graph", "node": "step2", "state": snapshot})
    return state

# --- Build and compile the graph: START -> step1 -> step2 -> END ---
builder = StateGraph(Mystate)
builder.add_node("step1", step1)
builder.add_node("step2", step2)
for src, dst in ((START, "step1"), ("step1", "step2"), ("step2", END)):
    builder.add_edge(src, dst)
lg_app = builder.compile()

app = FastAPI()

@app.get("/stream")
async def stream():
    """SSE endpoint: run the graph in the background and stream queued
    messages to the client until the DONE sentinel arrives.

    NOTE(review): messages flow through the single module-level queue, so
    this endpoint supports one consumer at a time — confirm before scaling.
    """
    async def event_generator():
        # Run the graph concurrently with the SSE consumer loop below.
        async def run_graph():
            # Always enqueue the sentinel — even if ainvoke raises —
            # otherwise the consumer loop would wait on the queue forever.
            try:
                await lg_app.ainvoke({})
            finally:
                msg_queue.put_nowait({"type": "DONE"})

        # Keep a reference to the task: the event loop holds only weak
        # references, and an unreferenced task may be garbage-collected
        # before it finishes.
        task = asyncio.create_task(run_graph())
        try:
            while True:
                msg = await msg_queue.get()
                # Push one SSE frame per queue message.
                yield f"data: {json.dumps(msg, ensure_ascii=False)}\n\n"
                if msg.get("type") == "DONE":
                    break
        finally:
            # Stop the background run if the client disconnects early;
            # cancelling an already-finished task is a harmless no-op.
            task.cancel()

    return StreamingResponse(event_generator(), media_type="text/event-stream")

if __name__ == "__main__":
    # Dev entry point: launch the ASGI server directly (python this_file.py).
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8002)
