| | import gradio as gr |
| | from langchain_ollama import ChatOllama |
| | from langgraph.graph import StateGraph, START, END |
| | from langgraph.graph.message import MessagesState |
| | from langgraph.checkpoint.memory import MemorySaver |
| | from langchain_core.messages import ( |
| | convert_to_openai_messages, |
| | SystemMessage, |
| | HumanMessage, |
| | ) |
| |
|
def create_conversation_graph():
    """Build and compile a single-node LangGraph chat workflow.

    The graph runs one "assistant" node that prepends a fixed system
    prompt and calls a local Ollama model. Conversation history is
    persisted per thread via an in-memory checkpointer, so repeated
    invocations with the same thread id continue the same conversation.

    Returns:
        The compiled graph, ready for ``invoke`` with a thread config.
    """
    checkpointer = MemorySaver()

    chat_model = ChatOllama(model="gemma3:4b", temperature=0)

    system_prompt = SystemMessage(content="You are a helpful assistant tasked with performing arithmetic on a set of inputs.")

    def assistant(state: MessagesState) -> MessagesState:
        # Prepend the system prompt on every turn; the returned message
        # is appended to the accumulated message state by LangGraph.
        reply = chat_model.invoke([system_prompt] + state["messages"])
        return {"messages": [reply]}

    workflow = StateGraph(MessagesState)
    workflow.add_node("assistant", assistant)
    workflow.add_edge(START, "assistant")
    workflow.add_edge("assistant", END)

    return workflow.compile(checkpointer=checkpointer)
| |
|
| |
|
def create_chat_interface(thread_id: str = "123"):
    """Create a Gradio chat UI backed by the conversation graph.

    Args:
        thread_id: Checkpointer thread identifier under which the
            conversation history is stored. Defaults to ``"123"`` for
            backward compatibility.
            NOTE(review): a single fixed thread id means every browser
            session connected to this interface shares one conversation
            memory — pass distinct ids (or wire up per-session state)
            if true per-user memory is needed.

    Returns:
        A configured ``gr.ChatInterface`` instance (not yet launched).
    """
    graph = create_conversation_graph()

    config = {"configurable": {"thread_id": thread_id}}

    def chat_with_assistant(message, history):
        """Send one user turn through the graph and return the reply."""
        # Prior turns are restored from the checkpointer keyed by
        # thread_id, so only the new human message is supplied here;
        # `history` (Gradio's view) is intentionally unused.
        messages_state = MessagesState(messages=[HumanMessage(content=message)])

        response = graph.invoke(messages_state, config)

        # The assistant's reply is the last message appended to state.
        ai_message = response["messages"][-1]

        # Gradio's "messages" type accepts OpenAI-style role/content dicts.
        return convert_to_openai_messages(ai_message)

    demo = gr.ChatInterface(
        fn=chat_with_assistant,
        type="messages",
        title="Conversational Bot",
        description="Ask anything you want",
        examples=["Hello", "What is your name?", "What is the weather in Tokyo?"],
    )
    return demo
| |
|
if __name__ == "__main__":
    # Script entry point: build the UI and start the Gradio server.
    create_chat_interface().launch()
| |
|
| |
|
| |
|