{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
    "1. 如何管理messages？\n",
    "2. 如何过滤messages\n",
    "3. 如何 [trim messages](https://python.langchain.com/docs/how_to/trim_messages/)"
   ],
   "metadata": {
    "collapsed": false
   },
   "id": "d32295d39a229245"
  },
  {
   "cell_type": "code",
   "outputs": [],
   "source": [
    "import os\n",
    "from langchain_openai import ChatOpenAI\n",
    "from langchain_community.llms.tongyi import Tongyi\n",
    "from dotenv import load_dotenv\n",
    "from typing import Literal\n",
    "\n",
    "from langchain_core.tools import tool\n",
    "\n",
    "from langgraph.checkpoint.memory import MemorySaver\n",
    "from langgraph.graph import MessagesState, StateGraph, START, END\n",
    "from langgraph.prebuilt import ToolNode\n",
    "\n",
    "memory = MemorySaver()\n",
    "\n",
    "\n",
    "@tool\n",
    "def search_web(query: str):\n",
    "    \"\"\"Call to surf the web.\"\"\"\n",
    "    # This is a placeholder for the actual implementation\n",
    "    # Don't let the LLM know this though 😊\n",
    "    print(f\"Searching for {query}...\")\n",
    "    return \"It's sunny in San Francisco, but you better look out if you're a Gemini 😈.\"\n",
    "\n",
    "tools = [search_web]\n",
    "tool_node = ToolNode(tools)\n",
    "\n",
    "load_dotenv()\n",
    "\n",
    "llm = ChatOpenAI(\n",
    "    openai_api_key=os.getenv(\"DASHSCOPE_API_KEY\"),\n",
    "    openai_api_base=\"https://dashscope.aliyuncs.com/compatible-mode/v1\",\n",
    "    model_name=\"qwen-max\",\n",
    "    temperature=0, \n",
    ")\n",
    "\n",
    "bound_model = llm.bind_tools(tools)\n",
    "\n",
    "def should_continue(state: MessagesState):\n",
    "    \"\"\"Return the next node to execute.\"\"\"\n",
    "    last_message = state[\"messages\"][-1]\n",
    "    # If there is no function call, then we finish\n",
    "    if not last_message.tool_calls:\n",
    "        return END\n",
    "    # Otherwise if there is, we continue\n",
    "    return \"action\"\n",
    "\n",
    "# Define the function that calls the model\n",
    "def call_model(state: MessagesState):\n",
    "    response = bound_model.invoke(state[\"messages\"])\n",
    "    # We return a list, because this will get added to the existing list\n",
    "    return {\"messages\": response}\n",
    "\n",
    "# Define a new graph\n",
    "workflow = StateGraph(MessagesState)\n",
    "\n",
    "\n",
    "# Define the two nodes we will cycle between\n",
    "workflow.add_node(\"agent\", call_model)\n",
    "workflow.add_node(\"action\", tool_node)\n",
    "\n",
    "# Set the entrypoint as `agent`\n",
    "# This means that this node is the first one called\n",
    "workflow.add_edge(START, \"agent\")\n",
    "\n",
    "# We now add a conditional edge\n",
    "workflow.add_conditional_edges(\n",
    "    # First, we define the start node. We use `agent`.\n",
    "    # This means these are the edges taken after the `agent` node is called.\n",
    "    \"agent\",\n",
    "    # Next, we pass in the function that will determine which node is called next.\n",
    "    should_continue,\n",
    "    # Next, we pass in the path map - all the possible nodes this edge could go to\n",
    "    [\"action\", END],\n",
    ")\n",
    "\n",
    "# We now add a normal edge from `tools` to `agent`.\n",
    "# This means that after `tools` is called, `agent` node is called next.\n",
    "workflow.add_edge(\"action\", \"agent\")\n",
    "\n",
    "# Finally, we compile it!\n",
    "# This compiles it into a LangChain Runnable,\n",
    "# meaning you can use it as you would any other runnable\n",
    "graph = workflow.compile(checkpointer=memory)"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-11-08T09:20:35.969325Z",
     "start_time": "2024-11-08T09:20:33.510416Z"
    }
   },
   "id": "edc582d5d0bb15e2",
   "execution_count": 1
  },
  {
   "cell_type": "code",
   "outputs": [
    {
     "data": {
      "image/jpeg": "/9j/4AAQSkZJRgABAQAAAQABAAD/4gHYSUNDX1BST0ZJTEUAAQEAAAHIAAAAAAQwAABtbnRyUkdCIFhZWiAH4AABAAEAAAAAAABhY3NwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAA9tYAAQAAAADTLQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAlkZXNjAAAA8AAAACRyWFlaAAABFAAAABRnWFlaAAABKAAAABRiWFlaAAABPAAAABR3dHB0AAABUAAAABRyVFJDAAABZAAAAChnVFJDAAABZAAAAChiVFJDAAABZAAAAChjcHJ0AAABjAAAADxtbHVjAAAAAAAAAAEAAAAMZW5VUwAAAAgAAAAcAHMAUgBHAEJYWVogAAAAAAAAb6IAADj1AAADkFhZWiAAAAAAAABimQAAt4UAABjaWFlaIAAAAAAAACSgAAAPhAAAts9YWVogAAAAAAAA9tYAAQAAAADTLXBhcmEAAAAAAAQAAAACZmYAAPKnAAANWQAAE9AAAApbAAAAAAAAAABtbHVjAAAAAAAAAAEAAAAMZW5VUwAAACAAAAAcAEcAbwBvAGcAbABlACAASQBuAGMALgAgADIAMAAxADb/2wBDAAMCAgMCAgMDAwMEAwMEBQgFBQQEBQoHBwYIDAoMDAsKCwsNDhIQDQ4RDgsLEBYQERMUFRUVDA8XGBYUGBIUFRT/2wBDAQMEBAUEBQkFBQkUDQsNFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBQUFBT/wAARCAD5AOADASIAAhEBAxEB/8QAHQABAAICAwEBAAAAAAAAAAAAAAYHBAUCAwgBCf/EAFAQAAEEAQICAwkLBwoEBwAAAAEAAgMEBQYRBxITITEUFRYXIkFVVpQIMjZRYXSTstHS0yNCVHGBkZUYNWJyc3Wxs7TBJSg0oTNDUldklvD/xAAbAQEBAAMBAQEAAAAAAAAAAAAAAQIDBAUGB//EADURAQABAgEJBQYGAwAAAAAAAAABAhEDBBIhMUFRUpHRFDNhcaEFExWBscEiI2KS8PFCwuH/2gAMAwEAAhEDEQA/AP1TREQEREBERARFGZ7V3VViatjrUuNxcLzHNkImt6Sd46nMhLgQADuHP2333DdiOYbKKM7baFs39u/WoNDrNiKu09hleGg/vWF4VYX0xQ9pZ9qw6nD/AE5TkMow9WxZJ3dats7ond/Wlk5nn9p86zPBXC+h6HszPsWy2DG2Z5R95NB4VYX0xQ9pZ9qeFWF9MUPaWfangrhfQ9D2Zn2J4K4X0PQ9mZ9ifk+Poug8KsL6Yoe0s+1PCrC+mKHtLPtTwVwvoeh7Mz7E8FcL6HoezM+xPyfH0NB4VYX0xQ9pZ9qeFWF9MUPaWfangrhfQ9D2Zn2J4K4X0PQ9mZ9ifk+PoaGbVu17zC+tYisMH50Tw4f9l3qP2tAafsvEjMVXp2RuW2qLe55mk9pD49nfF5/MuFS7d07dr0MpO69SsOEVTJOaA8P26op9thudvJeAA4+SQHcpkmZTV3c6d0/b+QltyRoiLQgiIgIiICIiAiIgIiICIiAiIg0OuMlPi9MW5KjxFcmdHUryH8yWaRsTHfsdID+xbTGY2vh8dWo1IxFWrRtijYOvZoGw/WtHxGYRpWW0AS2hZq33hreY8kNiOV/V/VY5SUEEAg7g+ddE9zT5z9IXY+oiLnRDOIPGLSHC6ehBqXLGlZvCR9evDVmsyvYzbnfyQse4MbzDdxAaN+sqLS+6MwsPHGLh66necJsXWvRZCHH25WPlnl5WRnlhLWxhvK4zOdyAuLSQWOCj3unKzqdzD5vBYzWMeuqNK2MPmtK403YmuPIe5bbNi0xSODD5bdhyE8zT240OQ1NpfjnpnVepNLZWx380VTxFyTA0n3I
qWRbZdLLHJybmOP8AKnZ7vJ8k9aCxKfH7QV/XHghFnuXPGzJSZDNTniiknj354mTOjET3jld5LXE9R6lj3PdE6FrZXM4qHJ27+VxEk8F2pRxVyw6CSKIyua8xwuDd2g8p7HkEN5iCF5xzmP1nqLOacv6hw2v8nqvEa7r38hHHBMMJSx0d1zY3VY2kRzjoXRnmYHydchcQN1evA3TF7G2OL/dmOnx8mU1jdnry2YHR90QurV2skaSPKZuHAOG43DtvOg3fAXjTR45aAx+oa1K1jrUsEclqpPVnjjie8E8scskbGzAbe/j3H6t1ZCpX3J1+9Q4R4LR+X09m8DmdM0YqFzvnRfDBLI0ubvBKfJmb5G/MwkbOb8aupAWDnMRDnsRbx9jcR2Iyzmb1OYfM4HzEHYgjsICzl1zzx1oJJpXBkUbS97j2AAbkrKmZiYmnWNVo3LzZ3S2MvWeXuqSECfk970rfJk2+TmDtluVG+HVeSDRWLdKx0clhjrRY4bOb0rnSbEeYjn2Kki2Y0RGLVFOq8rOsREWlBERAREQEREBERAREQEREHGSNk0bo5Gh7HAtc1w3BB7QQotjLzdFCDEZOVsWNaRFj78rvILeoNhkcex47ASfLG23lbhStddivFbgkhnjZNDI0tfHI0Oa4HtBB7QttFcRE01aYlYlDdScEuH2scxPls7onAZjKTholuXsdFLK/laGt3c5pJ2AAHyALXO9zfwpe1gdw40u4MHK0HEwHlG5Ow8n4yT+1SAcPqNV3/Db2TxEe+/Q07rxCP6sb+ZrR8jQAvngTY9as99ND+Es8zDnVXzjpctG9m6S0Tp/QWNfjtN4ShgaEkpnfWx1dkEbpCAC8taANyGtG/wAgW7UX8CbHrVnvpofwk8CbHrVnvpofwk93h8fpJaN6UIqrgx+Vk4rXtPO1TmO90OFr32ESw9J0r55mO3PR+95Y27dXbv1qWeBNj1qz300P4Se7w+P0ktG9lav0FpriBTgqamwOOz9WCTpYoclVZOxj9iOYBwOx2JG/yqKfyauE3/ttpb+EQfdUh8CbHrVnvpofwk8CbHrVnvpofwk93h8fpJaN7o0nwi0PoHJSZHTekcLgLz4jA+1jqMcEjoyQ4tLmgHbdrTt8g+Jdl+xHrvmxtMtmwXNy37Y36Oy3zwRHseD2PcNwBuwbuJ5OwcPsfYP/ABKzkM0zc/kchbe+E79odENmOHyOaf8AuVJY42Qxtjja1kbAGta0bAAdgASKqMPTRN55W/nyXRGpyREXOxEREBERAREQEREBERAREQEREBERAREQEREFe1CP5QOUG55vBip1fJ3XZ+X/AGVhKvam/wDKAynZt4MVPMN/+rs/t/8A361YSAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgIiICIiAiIgryoP+YPKnmG/gvT8nbr/wCrsqw1XlTb+UHlfj8F6fm/+XZ86sNAREQEREBERAREQEREBERAREQEREBERAREQEREBFDrGr8rkpZXYKhTmpRvdGLd6w+MTOa7lcWNaxxLNwRzEjfbcAtIcenv7rD9Awftc34a64yXE22j5wtk3RQjv7rD9Awftc34ad/dYfoGD9rm/DV7LXvjnBZ42xHu98vf90S/Ex8KZ26ltsh00cW/MtBjnjsSkuc/ufsBkO/V1BpK/QBeaaHACbHe6Hu8XYsfhu/Vmn0HcndEoiZORyPsAiP3zowGkfK49p6rf7+6w/QMH7XN+GnZa98c4LJuihHf3WH6Bg/a5vw07+6w/QMH7XN+GnZa98c4LJuiiFHV2Tp2oI87Qq1688jYWW6Nh8rWSOIDWyNcxpaCSAHAkbkb7bqXrRiYdWHNqi1hERakEREBERAREQEREBERAREQEREBERBXPDg82gsA49ppRE/rLQpGo3w3+AGnvmMX1QpIvZx+9r85+qzrkREWlBERARYIzmPObOHF2A5UVxbNISDpRCXcgkLe0NLgQD2Eg/EVnII9r47aXsE
dolgI+Q9MxWIq71/8FbP9rB/nMViLXlHdUec/ZlsERFwMRERAREQEREBERAREQEREBERAREQVxw3+AGnvmMX1QpIo3w3+AGnvmMX1QpIvZx+9r85+qzrl5Cs6z1Cdd6Z1tpu5qQaUy2s2YR0+Z1AZa9yKSxJBI2HH9Hyxxtc13I/na/8AJgkHfdYzczqHVGua9Y6i1W7XsGvGwZDT1exYix0GHjsc4cWM2jEfQNjf0m/M9zuU8wcWq/Jvc4cOrGUlyD9ODul9wZFnLcsNZXsiQS9NCwScsLy8Al0YaT177gnetcn7nXV9jiHaymJsYjTFSbNd8++2LzWVFkxmfpZGOpOk7mLpBzNcfennceXzLjmmURPCni5xej1FqjT191HJ183cpUel1VLWq0RXnMbYZsc2o+OTyWgu53lzufcFu4A2Wse/+Rqe6Az41hqLHXdIym1iK1HJyR1a0keMgnIMY6pGOcNix+7etxDQXOJu7IcA9BZPVz9TT4BozEliO3LJDaniimnYQWSyQseI3vBAPM5pO47VuLPDHTVunq2rLjeeDVYcMyzp5R3VzQNgPWHbs/Jta3yOXs37etXNkUthdOV9V+6rqZuzfy1e1NonHZboamVsQwmTup4MZja8NdF1AmMgsJc4kbuJPpFQ3NcHtI5/KYHJXMU45DBxtgoWYLU0MkcbS0iNzmPaZGbtaeV/MNx2dZUyWcRYR7X/AMFbP9rB/nMViKu9f/BWz/awf5zFYixyjuqPOf8AVlsERFwMRERAREQEREBERAREQEREBERAREQVxw3+AGnvmMX1QpIoriZMphpLWMxuEt5zEVOTuS/XkjjEjHgPaxvSvaJOVrgOkaS09Q98HAZ/fbPepmV9qpfjr2q7Ylc101RaZvrjqymLzdu0Wk77Z71MyvtVL8dO+2e9TMr7VS/HWGZ+qP3R1LN2ir2DjJWs8Q7OhYsJefqutTbkJcaLFTnbCSAHc3Tcu/WDy777EHbbrUn77Z71MyvtVL8dMz9UfujqWbtFpO+2e9TMr7VS/HTvtnvUzK+1Uvx0zP1R+6OpZ06/+Ctn+1g/zmKxFXeQx+o9SUZIhg2Y+OLlsCHI3WNNqRhD44eaHpBGxzmgOf5RDd9mO36pjis/XyluzS5ZK+SqRwvtVJWEOi6RvM3Z23K8dTm8zCW8zHDfdpA5somM2mi95i+rTrt0J1WbNERcLEREQEREBERAREQEREBERARFhZPL1sS2ETyN6ew90VWtztbJZkEb5OjjDiOZ/JG923xNcTsASA7MhkquJrGxdsxVYOdkfSTPDQXvcGMaN+1znOa0DtJcAOsrSvx1jV0RGUrPpYlwtVZsRYEcovRO/JtdL28rS3ncIwdyHt5tiCwZGMxdi5MzJZbcWZYYHDGlzZYKMrWkuMbuUFzi57gXnbcNbsG9e+7QcYomQxsjjY2ONgDWsaNg0DsAC5IiAiIg/O/T/uZuOGP917LraTUOlJtRRuZm7MYvWhDJTllkhNdpNckeRG5u22wG2x+L9EFX9ACbj7nHM5SYNM0GybE7jntXC0Edn/luVgICIiAsLKYepmYoGW4ukEE8dmIhxa5kjHBzXAgg9o2I7CCWncEg5qII7Hkr+nOjizDzfqOdZkdl442Qx1YmjnYJxzdvLzDnaOUlm5DeYBb6vYitwRzwSMmhlaHskjcHNe0jcEEdoI867FHrOLuafElrBxd1RNhggZhHTNgrxsY/ZzoTyHkf0ZIDDsxxYwbx7uegkKLDxmXp5hlh1Ow2cV55K0wbuDHKw7OaQesH/EEEbggrMQEREBERAREQEREBERBjZG63G4+1bfFNO2vE6UxVozJK8NBOzGDrc47bADrJ6lgYWlPK92SvmU2p/wApDWsxQh+PY5jOaAOj3362AuPO/d2+x5Q0DE1VSdlMlp6nJi5r9Hu4WZ547PRMrOhY6SJ72jrkHSNYA3s32J7NlI0BERAREQF03LkGPqT2rU8darAx0ss0zwx
kbGjdznOPUAACSSu5V7ZceLGUbWhdzaKoT72ZWkgZaxG7qhafPXY4bvI6pHN5PeNkDw7+FlWxko8zq67DJXsajsNs168oLXwUWMDKzHNPvXOaDK5p62unc09inaIgIiICIiAiIg1WWxkzpo8hQfIL9ZkpZVNgw17bnM2DJvJfsOZrCJA0vby9Xkue12Ti8pBla7pIXs6SN3RzwtkY90EoALo38pIDm7jcbrMUYyluDTmrsVPLer06uae+gavce77N0R9JE8zNHk7RQTNIf1H8mAQQGvCToiICIiAiIgIi0uY1tp7T9oVsnnMdj7JHN0Nm0xj9vj5Sd9lnTRVXNqYvK2u3SKLeNLR3rTiPbY/tTxpaO9acR7bH9q29nxuCeUrmzufdeGtQr4rOWaTbTcNebadO+6KrKUTmPhmsPc4hrmRxSyOc13VsNx5TWrYaX1jgNb499/TucxufoslMLrWLtx2YmyAAlhcwkBwBB27esfGvHvu9eEekeOmlodVac1Dipda4OuY2QMvRk3qoJeYQOb37S5zm7dvM4de42sv3HsuluE3uedJ4S/ncVRy0kLrt6GS0xj2zSuLy1wJ3DmtLWkHr8lOz43BPKTNnc9Hoot40tHetOI9tj+1PGlo71pxHtsf2p2fG4J5SZs7kpRRbxpaO9acR7bH9qg9jiBg+KORsULGfx+L0fDIYZYZLjI7OZcDylrgTvFV36vM+Y9Xkxf8Ajuz43BPKUzZ3JFNem4qzuq42d8GjY3Pjt5CJxY/KHYgxV3ggiLc+VK0+Vy8rOolwnNSpBj6kNWrDHWrQMbHFDCwMZGwDZrWtHUAAAAAuVeGKvBHFAxkUMbQ1jIwA1rQNgAB2DZdi50EREBERAREQEREBRvXOUGJx+MlOZdhBJlaVcytrdP0/SWGMEG35vSFwZz/m82/mUkVbcSuM2idIW4cVk+JOntJ5eG7UdPWuWoHz9EZWOMbonO5mNkYdukI2YHc++w3QWSi12ntSYnV2Hr5bBZSlmsVY5uhvY+wyeCXlcWu5XsJadnNcDseogjzLYoCIiAiIgws1cdj8PetMAL4IJJWg/G1pI/wUR0lUjrYClIBzT2YmTzzO63zSOaC57ieskk/s7OwKT6q+DGY+ZzfUKj2mvg5ivmkX1AvQwNGFPmuxskRFmgiIgIiIC+PY2Rpa5oc09RBG4K+ogxOHj+gbnsXH1VMbkOgrR7dUUboIZeRv9EGVwA7ANmgAAKXKHaB/njWX96x/6KqpiubKe9nyj6Qs6xERcqCIiAuMsrIY3ySPbHGwFznuOwaB2klYmZy9TAYuzkb0ohq12F8jz8XxAeck7ADzkgLzzq3U93XVsy5HmjotcHQYzm5oo9uwvA6nv8+53A/N26yfUyHIK8tqm02pjXPRfNctni3o+q8tdn6kpHaa5Mw/ewELo8c2jfTTfZ5fuKjgA0AAbAdQARfRx7DyfbVV6dEvC8fHNo30032eX7i8a+764ZYXjjb0tqPR1yKxqCGZuMvNMT2A1nu3bM4lo6o3F2/adnf0VbKJ8Dybiq5x0Lwsvhpqjh1ws0DgtJ4jLsbj8TVbXY7uaUGQjrfIRy++c4ucflcVJvHNo30032eX7io5E+B5NxVc46F4Xj45tG+mm/QS/cW6wetsBqWQxYzL07k4HMYY5R0gHx8h8rb5dvMvOi65q0c5aXt8phDmPB2cwg7gtcOsHcdoWFfsPAmPwVzE+Np+0F4eqkVVcL+I9mxciwOamNiaQHuK8/bmk2G5ik+N4AJDvzgCD5Q3faq+VynJsTJcScPE/savVXwYzHzOb6hUe018HMV80i+oFIdVfBjMfM5vqFR7TXwcxXzSL6gW/B7mfP7LsbJUnp33Rstvixj9DZ7A4/EXck+eKqaWoK+QnjkijdJy2YGAOh5mMcQd3Dcbb7q5rkBtVJ4WyvgdIxzBLGdnMJG24+ULzhob3PettL2+GsUvgjFQ0XefI6al04s5RkkMkMk8jizZku0
nOWeWHOJ8toHXJvosie6J4yah4jXYshp7Q/dGiJrUlaHP2ctHDLM1j3MdOytyEmLmaQCXhxHXyqHcLuJ3EbUHDLXmVzeDpWpMfcysVaWDN9FITDYewwAiqAwRsaQ2Xyi7kBLQSdpDwt4e8ROFEGP0lQs6av6FoWnmtcsmw3JMqOkc/oTG1vRue3m5RJzgbAEt3XZozhjrDSlbXOnXTYSzpbL2Mnex9oSzNuxy23mQRys5Czka57xzNcSRy+SOtTTtGp0lxryk2mtB4HSemrertQXNK089bGXzTYzWryMaGGa0YiZZnu5h1MHNylx5Qsuj7pG3qifSNPS+kJMlktQUr87q9/INqChNTmjhmimdyP6g5zxzMDju1vk7OLm4Gn+DGueHLdIZXStnT1vOU9JUtMZenlZZ2VZXVm7xzwyMjLtw50g5XNHM0j3pCzeG3ADKaC1TonJy5Srke9dDLjKz7OjksXL1iKdzo2bEBgLHjrcDty9R3O0jOFhcK+IbOJmkxljj5MTdht2cfdx8kglNazBK6KVnOAA4czSQ4AbgjqHYpeoJwg0HkOH+J1DVyM1aaTIahyWWiNVznBsViw+VjXczR5Qa4AgbjfsJ7VO1sjVpGDoH+eNZf3rH/oqqmKh2gf541l/esf8AoqqmK58q7z5R9IWRERcqCIiCqOPOTeIcBiQSI7M8lqQDse2Fo2afk55GO/WwKsFaHHrGPMGBywBMdWeSrKR2MbMBs4/Jzxsb+t4VXr9A9k5vY6c3xv536WKtgih83FfAQTPifFm+Zji08un8g4bj4iINj+sL4/i1p9ji0xZzcHbq09kCP39AvS99h8Uc2CK6090JR0xqPKYmpBjLbsUGi4b+bgoyF5aH8kMb9zKQ0jfflG5233B2ymcZr2dyPc2l9M9+mOw1XNsmsX21QYpuk2YRyOIf5A2HWDudy3Yb48WkNTY/P5nN6QdhrOK1G6O8+DUEM8MtSfo2sLmtDOZwcGtJY/kII23ClGN0ddqcSMvqB8lbuO5iatBkcZcHtkjfK5xI22DdpBt1k9R6lx0+/qq01aL7o1abWnkrSjjO7N1tNR6Wwcmcymbx/fQVJrLazKtbqBdLIWu2POeUAA7kHsAXdwJzGSzuj8hayr5zc79ZCN0VifpnQBth4EQdudwweSNurYdXUo1pnhJqzQtXSN7C2sPYzONw/eXI1rr5RWni6TpGvjkazmDmu37W7EO8y3ei5zwlwcuO1K6WxkbuQuZEuwuNuW4Q2WZzwN2RO5T5XYf3ntUw6sXPirG0aPC2z/os9FDfG3p/bfos5/8AXch+Atxp3WGO1S6dtBl9pgDS/u3G2anbvty9NG3m7D2b7eftC74xaKptFUT80bLITS1Kj7UDujs1drML9t+WRh5mn94C9TULjchRrWmDZk8bZWj5HAH/AHXlnIQy26rqtdvSWbRFaFm+3M955Wj95XqehUZj6NerH1sgjbE39QGw/wAF8z7ezbYe/T9mcamFqr4MZj5nN9QqPaa+DmK+aRfUClOZpuyOIvVGEB88EkQJ8xc0j/dRDSVyOxgacIPJZrQsgsQO6nwyNaA5jgesEH942I6iF4OBpwpjxXY3CIizQREQEREBEXGSRsTC97gxg6y5x2AQYegf541l/esf+iqqYqI8PGd0MzmUj66mTv8AT1pN+qWNsEUQkb/RcYnFp7HDZwJDgpcubKe9nyj0iFnWIiLlQREQYeYxNXPYyzj7sQmq2GGORh84PnB8xHaD5iAV551dpe9oW0Y8hzS48kCHKcu0b9+wP26mP82x2DvzfOB6SXGSNssbmPaHscC1zXDcEHtBC9PIsvryKqbRemdcL5vLTXB7Q5pBB6wR519V82uE2j7b3Pdp6lE53We54+hB8/YzZdHib0b6Di+lk+8vo49uZPtpq9OqWhRqK8vE3o30HF9LJ95PE3o30HF9LJ95X45k3DVyjqWhRqK8vE3o30HF9LJ95PE3o30HF9LJ95P
jmTcNXKOpaFGrrlsxwuY1zt5HnZkbQXPeewBrR1k/IAr28TejfQcX0sn3lusHozA6aeX4vEU6MpGxlhhaJCPiLu0j9qwr9uYMR+CiZnxtHUtCC8L+G9mpcjz2bh6CdgPcVF3W6LcEGWT4nkEgNHvQTvu52zLURF8rlOU4mVYk4mJ/QLS5jRWn9Q2BYymDxuRnA5RLaqRyPA+LdwJ2W6Rc9NdVE3pm0mpFvFXoz1Twn8Pi+6nir0Z6p4T+HxfdUpRbu0Y3HPOVvO9FvFXoz1Twn8Pi+6nir0Z6p4T+HxfdUpRO0Y3HPOS870W8VejPVPCfw+L7qeKvRnqnhP4fF91SlE7Rjcc85LzvRbxV6M9U8J/D4vursg4Z6QrSCSLS+GjeOsObQiB7d/8A0/GApKidoxp/znnJeRERc6CIiAiIgIiICIiAiIgIiICIiAiIg//Z",
      "text/plain": "<IPython.core.display.Image object>"
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
     "from IPython.display import Image, display\n",
    "\n",
    "display(Image(graph.get_graph().draw_mermaid_png()))"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-11-08T09:20:36.818221Z",
     "start_time": "2024-11-08T09:20:35.972418Z"
    }
   },
   "id": "e6b17ea77623de1b",
   "execution_count": 2
  },
  {
   "cell_type": "code",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "================================\u001B[1m Human Message \u001B[0m=================================\n",
      "\n",
      "hi! I'm bob\n",
      "==================================\u001B[1m Ai Message \u001B[0m==================================\n",
      "\n",
      "Hello Bob! It's nice to meet you. How can I assist you today?\n",
      "================================\u001B[1m Human Message \u001B[0m=================================\n",
      "\n",
      "what's my name?\n",
      "==================================\u001B[1m Ai Message \u001B[0m==================================\n",
      "\n",
      "Your name is Bob! You just told me that a moment ago. Is there anything else you'd like to know or discuss?\n"
     ]
    }
   ],
   "source": [
    "from langchain_core.messages import HumanMessage\n",
    "\n",
    "config = {\"configurable\": {\"thread_id\": \"2\"}}\n",
    "input_message = HumanMessage(content=\"hi! I'm bob\")\n",
    "for event in graph.stream({\"messages\": [input_message]}, config, stream_mode=\"values\"):\n",
    "    event[\"messages\"][-1].pretty_print()\n",
    "\n",
    "# graph.invoke({\"messages\": [input_message]}, config, stream_mode=\"values\")\n",
    "\n",
    "input_message = HumanMessage(content=\"what's my name?\")\n",
    "for event in graph.stream({\"messages\": [input_message]}, config, stream_mode=\"values\"):\n",
    "    event[\"messages\"][-1].pretty_print()"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-11-08T09:20:42.379622Z",
     "start_time": "2024-11-08T09:20:36.820058Z"
    }
   },
   "id": "72129227034bf473",
   "execution_count": 3
  },
  {
   "cell_type": "markdown",
   "source": [
    "## Filtering messages"
   ],
   "metadata": {
    "collapsed": false
   },
   "id": "1e73f6b58bfa8130"
  },
  {
   "cell_type": "code",
   "outputs": [],
   "source": [
    "def filter_messages(messages: list):\n",
     "    # This is a very simple helper function which only ever keeps the last message\n",
    "    return messages[-1:]\n",
    "\n",
    "\n",
    "# Define the function that calls the model\n",
    "def call_model(state: MessagesState):\n",
    "    messages = filter_messages(state[\"messages\"])\n",
    "    response = bound_model.invoke(messages)\n",
    "    # We return a list, because this will get added to the existing list\n",
    "    return {\"messages\": response}\n",
    "\n",
    "# Define a new graph\n",
    "workflow = StateGraph(MessagesState)\n",
    "\n",
    "\n",
    "# Define the two nodes we will cycle between\n",
    "workflow.add_node(\"agent\", call_model)\n",
    "workflow.add_node(\"action\", tool_node)\n",
    "\n",
    "# Set the entrypoint as `agent`\n",
    "# This means that this node is the first one called\n",
    "workflow.add_edge(START, \"agent\")\n",
    "\n",
    "# We now add a conditional edge\n",
    "workflow.add_conditional_edges(\n",
    "    # First, we define the start node. We use `agent`.\n",
    "    # This means these are the edges taken after the `agent` node is called.\n",
    "    \"agent\",\n",
    "    # Next, we pass in the function that will determine which node is called next.\n",
    "    should_continue,\n",
    "    # Next, we pass in the path map - all the possible nodes this edge could go to\n",
    "    [\"action\", END],\n",
    ")\n",
    "\n",
    "# We now add a normal edge from `tools` to `agent`.\n",
    "# This means that after `tools` is called, `agent` node is called next.\n",
    "workflow.add_edge(\"action\", \"agent\")\n",
    "\n",
    "# Finally, we compile it!\n",
    "# This compiles it into a LangChain Runnable,\n",
    "# meaning you can use it as you would any other runnable\n",
     "graph = workflow.compile(checkpointer=memory)\n"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-11-08T09:20:42.396903Z",
     "start_time": "2024-11-08T09:20:42.386796Z"
    }
   },
   "id": "7faf260a32c0f51c",
   "execution_count": 4
  },
  {
   "cell_type": "code",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "================================\u001B[1m Human Message \u001B[0m=================================\n",
      "\n",
      "hi! I'm bob\n",
      "==================================\u001B[1m Ai Message \u001B[0m==================================\n",
      "\n",
      "Hello Bob! It's nice to meet you. How can I assist you today?\n",
      "================================\u001B[1m Human Message \u001B[0m=================================\n",
      "\n",
      "what's my name?\n",
      "==================================\u001B[1m Ai Message \u001B[0m==================================\n",
      "\n",
      "I'm sorry, but as an AI, I don't have access to personal information about users, including names, unless it has been shared with me in the course of our conversation. Could you please tell me your name or any other details you'd like to share?\n"
     ]
    },
    {
     "data": {
      "text/plain": "{'messages': [HumanMessage(content=\"what's my name?\", additional_kwargs={}, response_metadata={}, id='813153d1-2895-4815-bd42-398b116c32d4'),\n  AIMessage(content=\"I'm sorry, but as an AI, I don't have the capability to know your name unless you've mentioned it in our conversation before. Could you please tell me your name?\", additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 37, 'prompt_tokens': 164, 'total_tokens': 201, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'qwen-max', 'system_fingerprint': None, 'finish_reason': 'stop', 'logprobs': None}, id='run-ab134a98-628b-4146-aeda-77a3b7ab254b-0', usage_metadata={'input_tokens': 164, 'output_tokens': 37, 'total_tokens': 201, 'input_token_details': {}, 'output_token_details': {}})]}"
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain_core.messages import HumanMessage\n",
    "\n",
    "config = {\"configurable\": {\"thread_id\": \"2\"}}\n",
    "input_message = HumanMessage(content=\"hi! I'm bob\")\n",
    "for event in graph.stream({\"messages\": [input_message]}, config, stream_mode=\"values\"):\n",
    "    event[\"messages\"][-1].pretty_print()\n",
    "\n",
    "# This will now not remember the previous messages\n",
    "# (because we set `messages[-1:]` in the filter messages argument)\n",
    "input_message = HumanMessage(content=\"what's my name?\")\n",
    "for event in graph.stream({\"messages\": [input_message]}, config, stream_mode=\"values\"):\n",
    "    event[\"messages\"][-1].pretty_print()\n",
    "    "
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-11-08T09:22:38.378464Z",
     "start_time": "2024-11-08T09:22:26.009965Z"
    }
   },
   "id": "1d0a90321a5d17c2",
   "execution_count": 7
  },
  {
   "cell_type": "code",
   "outputs": [
    {
     "ename": "ValueError",
     "evalue": "No checkpointer set",
     "output_type": "error",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mValueError\u001B[0m                                Traceback (most recent call last)",
      "Cell \u001B[0;32mIn[6], line 1\u001B[0m\n\u001B[0;32m----> 1\u001B[0m graph\u001B[38;5;241m.\u001B[39mget_state(config)\u001B[38;5;241m.\u001B[39mvalues[\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mmessages\u001B[39m\u001B[38;5;124m\"\u001B[39m]\n",
      "File \u001B[0;32m/opt/anaconda3/envs/ai_312/lib/python3.12/site-packages/langgraph/pregel/__init__.py:618\u001B[0m, in \u001B[0;36mPregel.get_state\u001B[0;34m(self, config, subgraphs)\u001B[0m\n\u001B[1;32m    614\u001B[0m checkpointer: Optional[BaseCheckpointSaver] \u001B[38;5;241m=\u001B[39m config[CONF]\u001B[38;5;241m.\u001B[39mget(\n\u001B[1;32m    615\u001B[0m     CONFIG_KEY_CHECKPOINTER, \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mcheckpointer\n\u001B[1;32m    616\u001B[0m )\n\u001B[1;32m    617\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m checkpointer:\n\u001B[0;32m--> 618\u001B[0m     \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;167;01mValueError\u001B[39;00m(\u001B[38;5;124m\"\u001B[39m\u001B[38;5;124mNo checkpointer set\u001B[39m\u001B[38;5;124m\"\u001B[39m)\n\u001B[1;32m    620\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m (\n\u001B[1;32m    621\u001B[0m     checkpoint_ns \u001B[38;5;241m:=\u001B[39m config[CONF]\u001B[38;5;241m.\u001B[39mget(CONFIG_KEY_CHECKPOINT_NS, \u001B[38;5;124m\"\u001B[39m\u001B[38;5;124m\"\u001B[39m)\n\u001B[1;32m    622\u001B[0m ) \u001B[38;5;129;01mand\u001B[39;00m CONFIG_KEY_CHECKPOINTER \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;129;01min\u001B[39;00m config[CONF]:\n\u001B[1;32m    623\u001B[0m     \u001B[38;5;66;03m# remove task_ids from checkpoint_ns\u001B[39;00m\n\u001B[1;32m    624\u001B[0m     recast_checkpoint_ns \u001B[38;5;241m=\u001B[39m NS_SEP\u001B[38;5;241m.\u001B[39mjoin(\n\u001B[1;32m    625\u001B[0m         part\u001B[38;5;241m.\u001B[39msplit(NS_END)[\u001B[38;5;241m0\u001B[39m] \u001B[38;5;28;01mfor\u001B[39;00m part \u001B[38;5;129;01min\u001B[39;00m checkpoint_ns\u001B[38;5;241m.\u001B[39msplit(NS_SEP)\n\u001B[1;32m    626\u001B[0m     )\n",
      "\u001B[0;31mValueError\u001B[0m: No checkpointer set"
     ]
    }
   ],
   "source": [
    "graph.get_state(config).values[\"messages\"]\n"
   ],
   "metadata": {
    "collapsed": false,
    "ExecuteTime": {
     "end_time": "2024-11-08T09:20:47.608816Z",
     "start_time": "2024-11-08T09:20:46.949220Z"
    }
   },
   "id": "25c5c18f1df14557",
   "execution_count": 6
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 自定义token计数\n",
    "接收一个list[message] 返回一个int"
   ],
   "metadata": {
    "collapsed": false
   },
   "id": "b0a017f1ee616bb7"
  },
  {
   "cell_type": "code",
   "outputs": [],
   "source": [
    "from typing import List\n",
    "\n",
    "import tiktoken\n",
     "from langchain_core.messages import BaseMessage, HumanMessage, ToolMessage, AIMessage, SystemMessage\n",
    "\n",
    "\n",
    "def str_token_counter(text: str) -> int:\n",
    "    enc = tiktoken.get_encoding(\"o200k_base\")\n",
    "    return len(enc.encode(text))\n",
    "\n",
    "def tiktoken_counter(messages: List[BaseMessage]) -> int:\n",
    "    \"\"\"Approximately reproduce https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb\n",
    "\n",
    "    For simplicity only supports str Message.contents.\n",
    "    Args:\n",
    "        messages (List[BaseMessage]): List of messages.\n",
    "    Returns: int: Number of tokens.\n",
    "    \n",
     "    Usage:\n",
     "    >>> trim_messages(\n",
     "    ...     messages,\n",
     "    ...     token_counter=tiktoken_counter,\n",
     "    ...     # Keep the last <= max_tokens tokens of the messages.\n",
     "    ...     strategy=\"last\",\n",
     "    ...     # When token_counter=len, each message\n",
     "    ...     # will be counted as a single token.\n",
     "    ...     # Remember to adjust for your use case.\n",
     "    ...     max_tokens=45,\n",
     "    ...     # Most chat models expect that chat history starts with either:\n",
     "    ...     # (1) a HumanMessage or\n",
     "    ...     # (2) a SystemMessage followed by a HumanMessage\n",
     "    ...     start_on=\"human\",\n",
     "    ...     # Most chat models expect that chat history ends with either:\n",
     "    ...     # (1) a HumanMessage or\n",
     "    ...     # (2) a ToolMessage\n",
     "    ...     end_on=(\"human\", \"tool\"),\n",
     "    ...     # Usually, we want to keep the SystemMessage\n",
     "    ...     # if it's present in the original history, since it\n",
     "    ...     # carries special instructions for the model.\n",
     "    ...     include_system=True,\n",
     "    ... )\n",
    "    \"\"\"\n",
    "    num_tokens = 3  # every reply is primed with <|start|>assistant<|message|>\n",
    "    tokens_per_message = 3\n",
    "    tokens_per_name = 1\n",
    "    for msg in messages:\n",
    "        if isinstance(msg, HumanMessage):\n",
    "            role = \"user\"\n",
    "        elif isinstance(msg, AIMessage):\n",
    "            role = \"assistant\"\n",
    "        elif isinstance(msg, ToolMessage):\n",
    "            role = \"tool\"\n",
    "        elif isinstance(msg, SystemMessage):\n",
    "            role = \"system\"\n",
    "        else:\n",
    "            raise ValueError(f\"Unsupported messages type {msg.__class__}\")\n",
    "        num_tokens += (\n",
    "            tokens_per_message\n",
    "            + str_token_counter(role)\n",
    "            + str_token_counter(msg.content)\n",
    "        )\n",
    "        if msg.name:\n",
    "            num_tokens += tokens_per_name + str_token_counter(msg.name)\n",
    "    return num_tokens\n"
   ],
   "metadata": {
    "collapsed": false
   },
   "id": "18806f7ff1ca38ed",
   "execution_count": null
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.0"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
