# langgraph-ui / app.py
# (HuggingFace Space page header preserved as a comment:
#  yoon-gu's picture · Update app.py · f647d10 verified)
import gradio as gr
import time
from gradio import ChatMessage
from langchain_core.runnables import RunnableConfig
from langchain_teddynote.messages import random_uuid
from langchain_core.messages import BaseMessage, HumanMessage
from pprint import pprint
from graph import app as workflow
def format_namespace(namespace):
    """Return a display name for a LangGraph stream namespace.

    Each namespace entry looks like ``"node_name:task_id"``; the display name
    is the node-name part of the innermost (last) entry. An empty namespace
    means the event came from the top-level graph.
    """
    if not namespace:
        return "root graph"
    innermost = namespace[-1]
    return innermost.split(":")[0]
def generate_response(message, history):
    """Stream LangGraph node updates into the chat as nested "thought" bubbles.

    Runs the imported ``workflow`` on the user's message with
    ``stream_mode="updates"`` and ``subgraphs=True``, opening one pending
    ChatMessage per node event and filling it with a text rendering of the
    node's chunk. Yields the accumulated message list after every update so
    gr.ChatInterface renders progress incrementally; the final yield appends
    the assistant's answer taken from the last chunk's ``messages``.

    Args:
        message: The user's chat input string.
        history: Prior chat history supplied by gr.ChatInterface (unused).

    Yields:
        list[ChatMessage]: The growing response transcript.
    """
    inputs = {
        "messages": [HumanMessage(content=message)],
    }
    node_names = []   # optional whitelist of node names to display (empty = show all)
    response = []     # accumulated ChatMessage objects streamed to the UI
    last_chunk = None  # remembers the final rendered chunk for the closing answer

    for namespace, chunk in workflow.stream(
        inputs,
        stream_mode="updates",
        subgraphs=True,
    ):
        for node_name, node_chunk in chunk.items():
            # Filter only when a whitelist was provided.
            if len(node_names) > 0 and node_name not in node_names:
                continue
            last_chunk = node_chunk

            # Close the previous pending "thought" bubble before opening a new one.
            if len(response) > 0:
                response[-1].metadata["status"] = "done"

            formatted_namespace = format_namespace(namespace)
            if formatted_namespace == "root graph":
                print(f"🔄 Node: \033[1;36m{node_name}\033[0m 🔄")
                meta_title = f"🤔 `{node_name}`"
            else:
                print(
                    f"🔄 Node: \033[1;36m{node_name}\033[0m in [\033[1;33m{formatted_namespace}\033[0m] 🔄"
                )
                meta_title = f"🤔 `{node_name}` in `{formatted_namespace}`"
            response.append(
                ChatMessage(content="", metadata={"title": meta_title, "status": "pending"})
            )
            yield response
            print("- " * 25)

            # Render the node's chunk payload into the pending bubble.
            out_str = []
            if isinstance(node_chunk, dict):
                for k, v in node_chunk.items():
                    if isinstance(v, BaseMessage):
                        v.pretty_print()
                        out_str.append(v.pretty_repr())
                    elif isinstance(v, list):
                        for list_item in v:
                            if isinstance(list_item, BaseMessage):
                                list_item.pretty_print()
                                out_str.append(list_item.pretty_repr())
                            else:
                                out_str.append(list_item)
                                print(list_item)
                    elif isinstance(v, dict):
                        # BUG FIX: the original iterated node_chunk.items() here,
                        # re-dumping the whole chunk instead of the nested dict v.
                        for sub_key, sub_value in v.items():
                            out_str.append(f"{sub_key}:\n{sub_value}")
                            print(f"{sub_key}:\n{sub_value}")
                    else:
                        out_str.append(f"{k}:\n{v}")
                        print(f"\033[1;32m{k}\033[0m:\n{v}")
                    response[-1].content = "\n".join(out_str)
                    yield response
            else:
                if node_chunk is not None:
                    for item in node_chunk:
                        out_str.append(item)
                        print(item)
                    response[-1].content = "\n".join(out_str)
                    yield response
            yield response
            print("=" * 50)

    # BUG FIX: the original epilogue unconditionally did response[-1] and
    # node_chunk["messages"][-1], which raises (NameError/IndexError/KeyError)
    # when the stream produced no displayable chunks or the last chunk carried
    # no "messages". Guard both before appending the final answer bubble.
    if response:
        response[-1].metadata["status"] = "done"
    if isinstance(last_chunk, dict) and last_chunk.get("messages"):
        response.append(ChatMessage(content=last_chunk["messages"][-1].content))
    yield response
# Chat UI wired to the streaming LangGraph responder; messages-type history
# is required for ChatMessage objects with metadata (nested "thoughts").
_chat_interface_kwargs = dict(
    type="messages",
    title="Nested Thoughts Chat Interface",
    examples=["2024λ…„μ˜ the FAANG companies 총 근둜자규λͺ¨μ— λŒ€ν•œ 뢄석을 ν•œκ΅­μ–΄λ‘œ 뢀탁해!"],
)
demo = gr.ChatInterface(generate_response, **_chat_interface_kwargs)
# Script entry point: serve the chat UI.
# ssr_mode=False disables Gradio's server-side rendering
# (NOTE(review): presumably for hosting-environment compatibility — confirm).
if __name__ == "__main__":
    demo.launch(ssr_mode=False)