feat: remove streaming
README.md CHANGED

@@ -93,3 +93,4 @@ A sample FastAPI server is provided in the `server.py` file. Note that this uses
 ## TODO
 
 - [ ] Implement memory in the chatbot
+- [ ] Fix streaming issues in the UI - for some reason, I can't figure out LangGraph with Chainlit
app.py CHANGED

@@ -19,15 +19,11 @@ async def main(message: cl.Message):
 
     msg = cl.Message(content="")
 
-    async for event in graph.astream_events(
+    res = graph.invoke(
         {"question": message.content},
         config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
-        version="v2",
-    ):
-        if (
-            event["event"] == "on_chat_model_stream"
-            and event["metadata"]["langgraph_node"] == "generate"
-        ):
-            await msg.stream_token(event["data"]["chunk"].content)
+    )
+
+    msg.content = res["generation"].content
 
     await msg.send()
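For context, here is a minimal, self-contained sketch of how `app.py` reads after this change. It is written under stated assumptions: the `@cl.on_message` decorator and the imports are not part of the diff above, and the `State` schema and the echo-style `generate` node are placeholders for the real graph, which presumably calls an LLM and stores its answer under the `generation` key.

```python
from typing import TypedDict

import chainlit as cl
from langchain_core.messages import AIMessage
from langchain_core.runnables import RunnableConfig
from langgraph.graph import END, START, StateGraph


class State(TypedDict):
    # Assumed graph state: the diff only shows "question" going in and
    # "generation" coming out of the final state.
    question: str
    generation: AIMessage


def generate(state: State) -> dict:
    # Placeholder node for illustration; the real app presumably calls an LLM here.
    return {"generation": AIMessage(content=f"Echo: {state['question']}")}


# Hypothetical graph wiring, just enough to make the handler runnable.
builder = StateGraph(State)
builder.add_node("generate", generate)
builder.add_edge(START, "generate")
builder.add_edge("generate", END)
graph = builder.compile()


@cl.on_message
async def main(message: cl.Message):
    # Start with an empty Chainlit message and fill it once the graph returns.
    msg = cl.Message(content="")

    # Single blocking run of the graph; Chainlit's LangChain callback handler is
    # still passed so intermediate steps show up in the UI.
    res = graph.invoke(
        {"question": message.content},
        config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
    )

    # The final state is assumed to hold an AIMessage under "generation".
    msg.content = res["generation"].content
    await msg.send()
```

Compared with the removed streaming loop, `graph.invoke` is a single call: the complete answer is assigned to `msg.content` and sent once, with no token-by-token streaming in the UI (hence the new TODO item in the README).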