# Gradio chat app that streams LangGraph node updates as nested "thought" bubbles.
import time
from pprint import pprint

import gradio as gr
from gradio import ChatMessage
from langchain_core.messages import BaseMessage, HumanMessage
from langchain_core.runnables import RunnableConfig
from langchain_teddynote.messages import random_uuid

from graph import app as workflow
def format_namespace(namespace):
    """Return the display name of the innermost namespace entry.

    LangGraph subgraph namespaces look like ``("parent:uuid", "child:uuid")``;
    the text before the first ``:`` of the last entry names the subgraph node.

    Parameters
    ----------
    namespace : Sequence[str]
        Namespace path as yielded by ``workflow.stream(..., subgraphs=True)``.

    Returns
    -------
    str
        The innermost namespace name, or ``"root graph"`` when the path is
        empty (i.e. the update came from the top-level graph).
    """
    # Idiomatic emptiness test (was `len(namespace) > 0` ternary).
    if not namespace:
        return "root graph"
    return namespace[-1].split(":")[0]
def generate_response(message, history):
    """Stream a LangGraph workflow run into a Gradio chat transcript.

    Each graph-node update becomes a collapsible "thought" bubble
    (``ChatMessage`` with ``metadata["title"]``); the final node's last
    message is appended as the visible answer.

    Parameters
    ----------
    message : str
        The user's latest chat message.
    history : list
        Prior chat history supplied by ``gr.ChatInterface`` (unused here).

    Yields
    ------
    list[ChatMessage]
        The growing transcript; each ``yield`` re-renders the chat UI.
    """
    inputs = {
        "messages": [HumanMessage(content=message)],
    }
    node_names = []  # optional whitelist of node names; empty means "show all"
    response = []
    node_chunk = None  # survives the loops; holds the last streamed update

    for namespace, chunk in workflow.stream(
        inputs,
        stream_mode="updates",
        subgraphs=True,
    ):
        for node_name, node_chunk in chunk.items():
            # Filter only when node_names is non-empty.
            if node_names and node_name not in node_names:
                continue

            # Close the previous pending "thought" bubble, if any.
            if response:
                response[-1].metadata["status"] = "done"

            formatted_namespace = format_namespace(namespace)
            if formatted_namespace == "root graph":
                print(f"π Node: \033[1;36m{node_name}\033[0m π")
                meta_title = f"π€ `{node_name}`"
            else:
                print(
                    f"π Node: \033[1;36m{node_name}\033[0m in [\033[1;33m{formatted_namespace}\033[0m] π"
                )
                meta_title = f"π€ `{node_name}` in `{formatted_namespace}`"

            # Open a new pending bubble for this node's output.
            response.append(
                ChatMessage(content="", metadata={"title": meta_title, "status": "pending"})
            )
            yield response
            print("- " * 25)

            # Render the node's chunk payload into the pending bubble.
            out_str = []
            if isinstance(node_chunk, dict):
                for k, v in node_chunk.items():
                    if isinstance(v, BaseMessage):
                        v.pretty_print()
                        out_str.append(v.pretty_repr())
                    elif isinstance(v, list):
                        for list_item in v:
                            if isinstance(list_item, BaseMessage):
                                list_item.pretty_print()
                                out_str.append(list_item.pretty_repr())
                            else:
                                # str() so "\n".join() cannot fail on non-strings
                                out_str.append(str(list_item))
                                print(list_item)
                    elif isinstance(v, dict):
                        # BUG FIX: iterate the nested dict `v`, not `node_chunk`
                        # (the original re-printed the outer dict here).
                        for sub_key, sub_value in v.items():
                            out_str.append(f"{sub_key}:\n{sub_value}")
                            print(f"{sub_key}:\n{sub_value}")
                    else:
                        out_str.append(f"{k}:\n{v}")
                        print(f"\033[1;32m{k}\033[0m:\n{v}")
                    response[-1].content = "\n".join(out_str)
                    yield response
            elif node_chunk is not None:
                for item in node_chunk:
                    out_str.append(str(item))  # str() guards the join below
                    print(item)
                response[-1].content = "\n".join(out_str)
                yield response

            yield response
            print("=" * 50)

    # Finalize: mark the last bubble done and surface the final answer.
    # Guards prevent NameError/IndexError/KeyError when the stream was empty
    # or the last chunk carried no "messages" key.
    if response:
        response[-1].metadata["status"] = "done"
    if isinstance(node_chunk, dict) and node_chunk.get("messages"):
        response.append(ChatMessage(content=node_chunk["messages"][-1].content))
    yield response
# Wire the streaming generator into a messages-format chat UI.
demo = gr.ChatInterface(
    fn=generate_response,
    type="messages",
    title="Nested Thoughts Chat Interface",
    examples=[
        "2024λ μ the FAANG companies μ΄ κ·Όλ‘μκ·λͺ¨μ λν λΆμμ νκ΅μ΄λ‘ λΆνν΄!"
    ],
)

if __name__ == "__main__":
    # ssr_mode=False: skip server-side rendering (avoids issues in hosted Spaces).
    demo.launch(ssr_mode=False)