{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "%%capture --no-stderr\n", "%pip install -U tavily-python langchain_community" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "True" ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import os\n", "from dotenv import load_dotenv\n", "\n", "load_dotenv()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "openai_api_key = os.getenv(\"OPENAI_API_KEY\")\n", "model = os.getenv(\"OPENAI_MODEL\", \"gpt-4o\")\n", "temperature = float(os.getenv(\"OPENAI_TEMPERATURE\", 0))" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "from langchain_community.tools.tavily_search import TavilySearchResults\n", "\n", "tool = TavilySearchResults(max_results=2)\n", "tools = [tool]" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from typing import Annotated\n", "from langchain_openai import ChatOpenAI as Chat\n", "\n", "from langchain_community.tools.tavily_search import TavilySearchResults\n", "from typing_extensions import TypedDict\n", "\n", "from langgraph.checkpoint.memory import MemorySaver\n", "from langgraph.graph import StateGraph, START\n", "from langgraph.graph.message import add_messages\n", "from langgraph.prebuilt import ToolNode, tools_condition\n", "\n", "memory = MemorySaver()\n", "\n", "\n", "class State(TypedDict):\n", " messages: Annotated[list, add_messages]\n", "\n", "\n", "graph_builder = StateGraph(State)\n", "\n", "\n", "tool = TavilySearchResults(max_results=2)\n", "tools = [tool]\n", "llm = Chat(\n", " openai_api_key=openai_api_key,\n", " model=model,\n", " temperature=temperature\n", ")\n", "llm_with_tools = llm.bind_tools(tools)\n", "\n", "\n", "def chatbot(state: State):\n", " return {\"messages\": [llm_with_tools.invoke(state[\"messages\"])]}\n", "\n", "\n", "graph_builder.add_node(\"chatbot\", chatbot)\n", "\n", "tool_node = ToolNode(tools=[tool])\n", "graph_builder.add_node(\"tools\", tool_node)\n", "\n", "graph_builder.add_conditional_edges(\n", " \"chatbot\",\n", " tools_condition,\n", ")\n", "graph_builder.add_edge(\"tools\", \"chatbot\")\n", "graph_builder.add_edge(START, \"chatbot\")" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "graph = graph_builder.compile(\n", " checkpointer=memory,\n", " # This is new!\n", " interrupt_before=[\"tools\"],\n", " # Note: can also interrupt __after__ tools, if desired.\n", " # interrupt_after=[\"tools\"]\n", ")" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "================================\u001b[1m Human Message \u001b[0m=================================\n", "\n", "I'm learning LangGraph. Could you do some research on it for me?\n", "==================================\u001b[1m Ai Message \u001b[0m==================================\n", "Tool Calls:\n", " tavily_search_results_json (call_rrzd6xIpsEpb8KbDwRtjJGSm)\n", " Call ID: call_rrzd6xIpsEpb8KbDwRtjJGSm\n", " Args:\n", " query: LangGraph programming language\n" ] } ], "source": [ "user_input = \"I'm learning LangGraph. 
, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "================================\u001b[1m Human Message \u001b[0m=================================\n", "\n", "I'm learning LangGraph. Could you do some research on it for me?\n", "==================================\u001b[1m Ai Message \u001b[0m==================================\n", "Tool Calls:\n", " tavily_search_results_json (call_rrzd6xIpsEpb8KbDwRtjJGSm)\n", " Call ID: call_rrzd6xIpsEpb8KbDwRtjJGSm\n", " Args:\n", " query: LangGraph programming language\n" ] } ], "source": [ "user_input = \"I'm learning LangGraph. Could you do some research on it for me?\"\n", "config = {\"configurable\": {\"thread_id\": \"1\"}}\n", "# The config is the **second positional argument** to stream() or invoke()!\n", "events = graph.stream(\n", "    {\"messages\": [(\"user\", user_input)]}, config, stream_mode=\"values\"\n", ")\n", "for event in events:\n", "    if \"messages\" in event:\n", "        event[\"messages\"][-1].pretty_print()" ] },
{ "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "('tools',)" ] }, "execution_count": 11, "metadata": {}, "output_type": "execute_result" } ], "source": [ "snapshot = graph.get_state(config)\n", "snapshot.next" ] },
{ "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "[{'name': 'tavily_search_results_json',\n", " 'args': {'query': 'LangGraph programming language'},\n", " 'id': 'call_rrzd6xIpsEpb8KbDwRtjJGSm',\n", " 'type': 'tool_call'}]" ] }, "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "existing_message = snapshot.values[\"messages\"][-1]\n", "existing_message.tool_calls" ] },
{ "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "==================================\u001b[1m Ai Message \u001b[0m==================================\n", "Tool Calls:\n", " tavily_search_results_json (call_rrzd6xIpsEpb8KbDwRtjJGSm)\n", " Call ID: call_rrzd6xIpsEpb8KbDwRtjJGSm\n", " Args:\n", " query: LangGraph programming language\n", "=================================\u001b[1m Tool Message \u001b[0m=================================\n", "Name: tavily_search_results_json\n", "\n", "[{\"url\": \"https://www.datacamp.com/tutorial/langgraph-tutorial\", \"content\": \"LangGraph can be used to build a wide range of applications. Chatbots. LangGraph is ideal for developing sophisticated chatbots that can handle a wide array of user requests. By leveraging multiple LLM agents, these chatbots can process natural language queries, provide accurate responses, and seamlessly switch between different conversation\"}, {\"url\": \"https://github.com/langchain-ai/langgraph\", \"content\": \"Overview. LangGraph is a library for building stateful, multi-actor applications with LLMs, used to create agent and multi-agent workflows. Compared to other LLM frameworks, it offers these core benefits: cycles, controllability, and persistence. LangGraph allows you to define flows that involve cycles, essential for most agentic architectures\"}]\n", "==================================\u001b[1m Ai Message \u001b[0m==================================\n", "\n", "LangGraph is a library designed for building stateful, multi-actor applications using large language models (LLMs). It is particularly useful for creating agent and multi-agent workflows. Here are some key features and applications of LangGraph:\n", "\n", "1. **Applications**: LangGraph can be used to develop a wide range of applications, including sophisticated chatbots. These chatbots can handle various user requests, process natural language queries, provide accurate responses, and switch seamlessly between different conversation topics.\n", "\n", "2. **Core Benefits**:\n", " - **Cycles**: LangGraph supports the creation of workflows that involve cycles, which are essential for most agentic architectures.\n", " - **Controllability**: It offers a high degree of control over the workflows, allowing developers to fine-tune the behavior of the agents.\n", " - **Persistence**: LangGraph provides mechanisms to maintain the state of applications over time, which is crucial for building long-running applications.\n", "\n", "For more detailed information, you can explore resources like the [LangGraph GitHub repository](https://github.com/langchain-ai/langgraph) or tutorials available on platforms like [DataCamp](https://www.datacamp.com/tutorial/langgraph-tutorial).\n" ] } ], "source": [ "# `None` will append nothing new to the current state, letting it resume as if it had never been interrupted\n", "events = graph.stream(None, config, stream_mode=\"values\")\n", "for event in events:\n", "    if \"messages\" in event:\n", "        event[\"messages\"][-1].pretty_print()" ] }
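, { "cell_type": "markdown", "metadata": {}, "source": [ "A small follow-up check (an addition here, reusing the same `config` / thread): read the thread's state from the checkpointer again. Once the run has resumed past the `tools` interrupt and finished, `snapshot.next` should be empty, meaning no node is left pending." ] },
{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Re-read the checkpointed state for this thread; an empty `next` tuple\n", "# means no node is pending, i.e. the interrupted run has completed.\n", "snapshot = graph.get_state(config)\n", "snapshot.next" ] }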
], "metadata": { "kernelspec": { "display_name": ".venv", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.7" } }, "nbformat": 4, "nbformat_minor": 2 }