{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[registered tool] {'function': {'description': 'Generates a random number x, s.t. range[0] <= x '\n",
      "                             '< range[1]',\n",
      "              'name': 'random_number_generator',\n",
      "              'parameters': {'properties': {'city_name': {'description': 'The '\n",
      "                                                                         'name '\n",
      "                                                                         'of '\n",
      "                                                                         'the '\n",
      "                                                                         'city '\n",
      "                                                                         'to '\n",
      "                                                                         'be '\n",
      "                                                                         'queried',\n",
      "                                                          'type': 'string'},\n",
      "                                            'range': {'description': 'The '\n",
      "                                                                     'range of '\n",
      "                                                                     'the '\n",
      "                                                                     'generated '\n",
      "                                                                     'numbers',\n",
      "                                                      'type': 'tuple[int, '\n",
      "                                                              'int]'},\n",
      "                                            'seed': {'description': 'The '\n",
      "                                                                    'random '\n",
      "                                                                    'seed used '\n",
      "                                                                    'by the '\n",
      "                                                                    'generator',\n",
      "                                                     'type': 'int'}},\n",
      "                             'required': ['seed', 'range'],\n",
      "                             'type': 'object'}},\n",
      " 'type': 'function'}\n",
      "[registered tool] {'function': {'description': 'Get the current weather for `city_name`',\n",
      "              'name': 'get_weather',\n",
      "              'parameters': {'properties': {'city_name': {'description': 'The '\n",
      "                                                                         'name '\n",
      "                                                                         'of '\n",
      "                                                                         'the '\n",
      "                                                                         'city '\n",
      "                                                                         'to '\n",
      "                                                                         'be '\n",
      "                                                                         'queried',\n",
      "                                                          'type': 'str'}},\n",
      "                             'required': ['city_name'],\n",
      "                             'type': 'object'}},\n",
      " 'type': 'function'}\n"
     ]
    }
   ],
   "source": [
    "from zhipuai import ZhipuAI\n",
    "from zhipu_tools_demo.tool_register import get_tools, dispatch_tool\n",
    "import json\n",
    "\n",
    "client = ZhipuAI(api_key=\"\") # Fill in your own API key here. NOTE(review): avoid hardcoding real keys in the notebook; prefer an environment variable."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [],
   "source": [
    "def run_conv_glm(messages, stream=False, tools=None, max_retry=5, model='glm-4'):\n",
    "    \"\"\"Run a tool-calling conversation round-trip against the GLM chat API.\n",
    "\n",
    "    Calls the model; whenever the model requests a tool call, dispatches it\n",
    "    via `dispatch_tool`, appends the tool result to `messages`, and calls the\n",
    "    model again - for at most `max_retry` rounds.\n",
    "\n",
    "    Args:\n",
    "        messages: conversation history; mutated in place (assistant and tool\n",
    "            messages are appended as the loop runs).\n",
    "        stream: unused; kept for interface compatibility.\n",
    "        tools: tool schemas passed to the API (e.g. from get_tools()).\n",
    "        max_retry: maximum number of model calls before giving up.\n",
    "        model: model name to query.\n",
    "\n",
    "    Returns:\n",
    "        The model's final text answer, or None if `max_retry` rounds were\n",
    "        exhausted while the model kept requesting tool calls.\n",
    "    \"\"\"\n",
    "    for _ in range(max_retry):\n",
    "        model_response = client.chat.completions.create(\n",
    "            model=model,\n",
    "            messages=messages,\n",
    "            tools=tools,\n",
    "        )\n",
    "        print(f\"本次消耗tokens: {model_response.usage.total_tokens}\")\n",
    "        message = model_response.choices[0].message\n",
    "        messages.append(message.model_dump())\n",
    "        if not message.tool_calls:\n",
    "            # No tool call requested: this is the final answer.\n",
    "            return message.content\n",
    "        # The model requested a tool call: execute it and feed the result back.\n",
    "        tool_call = message.tool_calls[0]\n",
    "        args = tool_call.function.arguments\n",
    "        print(f\"正在调用函数: {tool_call.function.name}，函数参数为：{args}\")\n",
    "        function_result = dispatch_tool(tool_call.function.name, json.loads(args))\n",
    "        # Build the tool message carrying the function result to the model.\n",
    "        messages.append({\n",
    "            \"role\": \"tool\",\n",
    "            \"content\": f\"{json.dumps(function_result)}\",\n",
    "            \"tool_call_id\": tool_call.id\n",
    "        })\n",
    "    # Exhausted max_retry rounds without a non-tool-call response.\n",
    "    return None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "CompletionMessage(content='根据API调用结果，今天杭州的天气状况为：温度为9摄氏度，湿度为87%，天气状况为“Mist”，观测时间为晚上11:59。此外，体感温度为8摄氏度。', role='assistant', tool_calls=None)"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages = [{'role': 'user', 'content': '请问今天杭州天气怎么样？'}]\n",
    "run_conv_glm(messages=messages, tools=get_tools())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'role': 'user', 'content': '请问今天杭州天气怎么样？'},\n",
       " {'role': 'tool',\n",
       "  'content': '\"{\\'current_condition\\': {\\'temp_C\\': \\'9\\', \\'FeelsLikeC\\': \\'8\\', \\'humidity\\': \\'87\\', \\'weatherDesc\\': [{\\'value\\': \\'Mist\\'}], \\'observation_time\\': \\'11:59 PM\\'}}\"',\n",
       "  'tool_call_id': 'call_8308766722699635503'},\n",
       " {'content': '根据API调用结果，今天杭州的天气状况为：温度为9摄氏度，湿度为87%，天气状况为“Mist”，观测时间为晚上11:59。此外，体感温度为8摄氏度。',\n",
       "  'role': 'assistant',\n",
       "  'tool_calls': None}]"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "CompletionMessage(content='根据API调用结果，今天北京的天气状况为：温度为-8摄氏度，湿度为92%，天气状况为“Mist”，观测时间为晚上10:32。此外，体感温度为-10摄氏度。', role='assistant', tool_calls=None)"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages.append({'role': 'user', 'content': '那北京呢？'})\n",
    "run_conv_glm(messages=messages, tools=get_tools())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "CompletionMessage(content='根据API调用结果，北京的温度为-8摄氏度，体感温度为-10摄氏度，而杭州的温度为9摄氏度，体感温度为8摄氏度。因此，北京更冷。', role='assistant', tool_calls=None)"
      ]
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages.append({'role': 'user', 'content': '那杭州更冷吗？'})\n",
    "run_conv_glm(messages=messages, tools=get_tools())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "use tokens: 308\n",
      "use tokens: 378\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "CompletionMessage(content='根据API返回结果，南昌今天有轻微的雨，当前温度为11摄氏度，湿度为90%。观测时间为凌晨1点50分。', role='assistant', tool_calls=None)"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages = [{'role': 'user', 'content': '请问今天南昌天气怎么样？'}]\n",
    "run_conv_glm(messages=messages, tools=get_tools())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "use tokens: 331\n",
      "use tokens: 410\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "CompletionMessage(content='根据API返回结果，南昌当前天气状况为：轻微细雨，温度为11摄氏度，湿度为90%。', role='assistant', tool_calls=None)"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages = []\n",
    "messages.append({\"role\": \"system\", \"content\": \"不要假设或猜测传入函数的参数值。如果用户的描述不明确，请要求用户提供必要信息\"})\n",
    "messages.append({\"role\": \"user\", \"content\": \"请问今天南昌天气怎么样？\"})\n",
    "run_conv_glm(messages=messages, tools=get_tools())\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'role': 'system', 'content': '不要假设或猜测传入函数的参数值。如果用户的描述不明确，请要求用户提供必要信息'},\n",
       " {'role': 'user', 'content': '请问今天南昌天气怎么样？'},\n",
       " {'content': None,\n",
       "  'role': 'assistant',\n",
       "  'tool_calls': [{'id': 'call_8308764214438518684',\n",
       "    'function': {'arguments': '{\"city_name\":\"南昌\"}', 'name': 'get_weather'},\n",
       "    'type': 'function'}]},\n",
       " {'role': 'tool',\n",
       "  'content': '\"{\\'current_condition\\': {\\'temp_C\\': \\'11\\', \\'FeelsLikeC\\': \\'10\\', \\'humidity\\': \\'90\\', \\'weatherDesc\\': [{\\'value\\': \\'Light drizzle\\'}], \\'observation_time\\': \\'01:50 AM\\'}}\"',\n",
       "  'tool_call_id': 'call_8308764214438518684'},\n",
       " {'content': '根据API返回结果，南昌当前天气状况为：轻微细雨，温度为11摄氏度，湿度为90%。',\n",
       "  'role': 'assistant',\n",
       "  'tool_calls': None}]"
      ]
     },
     "execution_count": 16,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "use tokens: 434\n",
      "use tokens: 520\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "CompletionMessage(content='根据API返回结果，杭州当前天气状况为：薄雾，温度为9摄氏度，湿度为87%。与南昌相比，杭州更冷一些。', role='assistant', tool_calls=None)"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages.append({\"role\": \"user\", \"content\": \"那杭州比南昌更冷吗？\"})\n",
    "run_conv_glm(messages=messages, tools=get_tools())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[{'role': 'system', 'content': '不要假设或猜测传入函数的参数值。如果用户的描述不明确，请要求用户提供必要信息'},\n",
       " {'role': 'user', 'content': '请问今天南昌天气怎么样？'},\n",
       " {'content': None,\n",
       "  'role': 'assistant',\n",
       "  'tool_calls': [{'id': 'call_8308764214438518684',\n",
       "    'function': {'arguments': '{\"city_name\":\"南昌\"}', 'name': 'get_weather'},\n",
       "    'type': 'function'}]},\n",
       " {'role': 'tool',\n",
       "  'content': '\"{\\'current_condition\\': {\\'temp_C\\': \\'11\\', \\'FeelsLikeC\\': \\'10\\', \\'humidity\\': \\'90\\', \\'weatherDesc\\': [{\\'value\\': \\'Light drizzle\\'}], \\'observation_time\\': \\'01:50 AM\\'}}\"',\n",
       "  'tool_call_id': 'call_8308764214438518684'},\n",
       " {'content': '根据API返回结果，南昌当前天气状况为：轻微细雨，温度为11摄氏度，湿度为90%。',\n",
       "  'role': 'assistant',\n",
       "  'tool_calls': None},\n",
       " {'role': 'user', 'content': '那杭州比南昌更冷吗？'},\n",
       " {'content': None,\n",
       "  'role': 'assistant',\n",
       "  'tool_calls': [{'id': 'call_8308765726267137602',\n",
       "    'function': {'arguments': '{\"city_name\":\"杭州\"}', 'name': 'get_weather'},\n",
       "    'type': 'function'}]},\n",
       " {'role': 'tool',\n",
       "  'content': '\"{\\'current_condition\\': {\\'temp_C\\': \\'9\\', \\'FeelsLikeC\\': \\'8\\', \\'humidity\\': \\'87\\', \\'weatherDesc\\': [{\\'value\\': \\'Mist\\'}], \\'observation_time\\': \\'11:59 PM\\'}}\"',\n",
       "  'tool_call_id': 'call_8308765726267137602'},\n",
       " {'content': '根据API返回结果，杭州当前天气状况为：薄雾，温度为9摄氏度，湿度为87%。与南昌相比，杭州更冷一些。',\n",
       "  'role': 'assistant',\n",
       "  'tool_calls': None}]"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "messages"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "- 创建多轮对话函数"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [],
   "source": [
    "def chat_with_glm(question, tools=None, model='glm-4'):\n",
    "    \"\"\"Interactive multi-turn chat loop on top of run_conv_glm.\n",
    "\n",
    "    Seeds a fresh conversation with a system prompt, answers `question`,\n",
    "    then keeps prompting the user for follow-up questions until the user\n",
    "    types \"退出\" (quit).\n",
    "\n",
    "    Args:\n",
    "        question: the first user question.\n",
    "        tools: tool schemas forwarded to run_conv_glm.\n",
    "        model: model name forwarded to run_conv_glm.\n",
    "    \"\"\"\n",
    "    print(f\"用户问题：{question}\")\n",
    "    messages = [\n",
    "        # Discourage the model from guessing missing tool arguments.\n",
    "        {\"role\": \"system\", \"content\": \"不要假设或猜测传入函数的参数值。如果用户的描述不明确，请要求用户提供必要信息\"},\n",
    "        {\"role\": \"user\", \"content\": question},\n",
    "    ]\n",
    "    while True:\n",
    "        answer = run_conv_glm(messages=messages, tools=tools, model=model)\n",
    "        print(f\"模型回答：{answer}\")\n",
    "\n",
    "        # Ask whether the user has a follow-up question.\n",
    "        question = input(\"您还有其他问题吗？（输入退出以结束对话）：\")\n",
    "        print(f\"用户问题：{question}\")\n",
    "        if question == \"退出\":\n",
    "            break\n",
    "        messages.append({\"role\": \"user\", \"content\": question})"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "用户问题：请问今天上海天气怎么样？\n",
      "use tokens: 331\n",
      "use tokens: 443\n",
      "模型回答：根据API调用结果，当前上海的天气情况为：温度为9摄氏度，湿度为87%，天气状况为“Mist”，观测时间为01:44 AM。需要注意的是，API返回的结果中还包括一个“FeelsLikeC”字段，表示体感温度为7摄氏度。\n",
      "用户问题：北京会比上海更冷吗\n",
      "use tokens: 477\n",
      "use tokens: 568\n",
      "模型回答：北京和上海的温度差异可能会因季节和天气条件而有所不同。如果您需要比较两地当前的温度，请提供各自的温度信息。\n",
      "用户问题：北京现在比上海更冷吗\n",
      "use tokens: 591\n",
      "use tokens: 674\n",
      "模型回答：根据查询结果，北京现在的温度为-8摄氏度，而上海的温度为9摄氏度。因此，北京现在比上海更冷。\n",
      "用户问题：杭州比上海更冷吗\n",
      "use tokens: 696\n",
      "use tokens: 774\n",
      "模型回答：根据查询结果，杭州现在的温度为9摄氏度，与上海的温度相同。因此，杭州现在和上海一样冷。\n",
      "用户问题：杭州比北京更冷吗\n",
      "use tokens: 797\n",
      "use tokens: 879\n",
      "模型回答：根据查询结果，杭州现在的温度为9摄氏度，而北京的温度为-8摄氏度。因此，杭州现在比北京暖和。\n",
      "用户问题：退出\n"
     ]
    }
   ],
   "source": [
    "chat_with_glm(\"请问今天上海天气怎么样？\", tools=get_tools())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "用户问题：请问今天上海天气怎么样？\n",
      "本次消耗tokens: 331\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"上海\"}\n",
      "本次消耗tokens: 419\n",
      "模型回答：根据API调用结果，当前上海的天气状况为：温度为9摄氏度，湿度为87%，天气状况为“Mist”，观测时间为01:44 AM。\n",
      "用户问题：北京会比上海更冷吗\n",
      "本次消耗tokens: 453\n",
      "正在调用函数: random_number_generator，函数参数为：{\"city_name\":\"北京\",\"range\":[0,100],\"seed\":42}\n",
      "本次消耗tokens: 537\n",
      "模型回答：当前北京的天气状况我无法获取，但是根据历史数据，北京通常会比上海更冷。\n",
      "用户问题：北京现在比上海更冷吗\n",
      "本次消耗tokens: 560\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"北京\"}\n",
      "本次消耗tokens: 676\n",
      "模型回答：根据API调用结果，当前北京的天气状况为：温度为-8摄氏度，湿度为92%，天气状况为“Mist”，观测时间为10:32 PM。相比之下，上海的天气要温暖一些，当前温度为9摄氏度。因此，可以得出结论，北京现在比上海更冷。\n",
      "用户问题：杭州比上海更冷吗\n",
      "本次消耗tokens: 699\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"杭州\"}\n",
      "本次消耗tokens: 823\n",
      "模型回答：根据API调用结果，当前杭州的天气状况为：温度为9摄氏度，湿度为87%，天气状况为“Mist”，观测时间为11:59 PM。与上海相比，杭州的天气温度相同，但是杭州的湿度更高一些。因此，可以得出结论，杭州和上海的温度相同，但是杭州的湿度更高。\n",
      "用户问题：杭州比北京更冷吗\n",
      "本次消耗tokens: 845\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"杭州\"}\n",
      "本次消耗tokens: 968\n",
      "模型回答：根据API调用结果，当前杭州的天气状况为：温度为9摄氏度，湿度为87%，天气状况为“Mist”，观测时间为11:59 PM。与北京相比，杭州的天气温度要高很多，因为北京的气温是-8摄氏度。因此，可以得出结论，杭州比北京要暖和得多。\n",
      "用户问题：杭州比北京更冷吗\n",
      "本次消耗tokens: 1013\n",
      "模型回答：根据我之前获取的信息，杭州的当前温度为9摄氏度，而北京的当前温度为-8摄氏度。因此，杭州比北京要暖和得多。\n",
      "用户问题：上海现在比北京更冷吗\n",
      "本次消耗tokens: 1036\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"上海\"}\n",
      "本次消耗tokens: 1157\n",
      "模型回答：根据API调用结果，当前上海的天气状况为：温度为9摄氏度，湿度为87%，天气状况为“Mist”，观测时间为01:44 AM。与北京相比，上海的天气温度要高很多，因为北京的气温是-8摄氏度。因此，可以得出结论，上海比北京要暖和得多。\n",
      "用户问题：退出\n"
     ]
    }
   ],
   "source": [
    "chat_with_glm(\"请问今天上海天气怎么样？\", tools=get_tools())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "用户问题：请问今天上海天气怎么样？\n",
      "本次消耗tokens: 412\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"上海\"}\n",
      "本次消耗tokens: 531\n",
      "模型回答：根据API调用结果，上海现在的气温为9摄氏度，体感温度为7摄氏度，湿度为87%，天气状况为 Mist，观测时间为01:44 AM。请注意，这是当前的天气状况，可能会随时变化。\n",
      "用户问题：北京今天比上海更冷吗\n",
      "本次消耗tokens: 561\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"北京\"}\n",
      "本次消耗tokens: 704\n",
      "模型回答：根据API调用结果，北京现在的气温为0摄氏度，体感温度为-2摄氏度，湿度为69%，天气状况为 Sunny，观测时间为02:40 AM。请注意，这是当前的天气状况，可能会随时变化。根据这些信息，我们可以得出结论：北京现在比上海更冷，因为北京的气温低于上海。\n",
      "用户问题：杭州今天比北京更冷吗\n",
      "本次消耗tokens: 734\n",
      "正在调用函数: get_weather，函数参数为：{\"city_name\":\"杭州\"}\n",
      "本次消耗tokens: 878\n",
      "模型回答：根据API调用结果，杭州现在的气温为9摄氏度，体感温度为8摄氏度，湿度为87%，天气状况为 Mist，观测时间为11:59 PM。请注意，这是当前的天气状况，可能会随时变化。\n",
      "\n",
      "根据这些信息，我们可以得出结论：杭州今天比北京更暖和，因为杭州的气温高于北京。\n",
      "用户问题：杭州今天比上海更冷吗\n"
     ]
    },
    {
     "ename": "APIInternalError",
     "evalue": "Error code: 500, with error text {\"error\":{\"code\":\"500\",\"message\":\"Internal Error\"}}",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mAPIInternalError\u001b[0m                          Traceback (most recent call last)",
      "Cell \u001b[1;32mIn[36], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m \u001b[43mchat_with_glm\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m请问今天上海天气怎么样？\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtools\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mget_tools\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mglm-3-turbo\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "Cell \u001b[1;32mIn[35], line 7\u001b[0m, in \u001b[0;36mchat_with_glm\u001b[1;34m(question, tools, model)\u001b[0m\n\u001b[0;32m      5\u001b[0m messages\u001b[38;5;241m.\u001b[39mappend({\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrole\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124muser\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcontent\u001b[39m\u001b[38;5;124m\"\u001b[39m: question})\n\u001b[0;32m      6\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[1;32m----> 7\u001b[0m     answer \u001b[38;5;241m=\u001b[39m \u001b[43mrun_conv_glm\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmessages\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmessages\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtools\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtools\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmodel\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m      8\u001b[0m     \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m模型回答：\u001b[39m\u001b[38;5;132;01m{\u001b[39;00manswer\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m     10\u001b[0m     \u001b[38;5;66;03m# 询问用户是否还有其他问题\u001b[39;00m\n",
      "Cell \u001b[1;32mIn[33], line 4\u001b[0m, in \u001b[0;36mrun_conv_glm\u001b[1;34m(messages, stream, tools, max_retry, model)\u001b[0m\n\u001b[0;32m      1\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mrun_conv_glm\u001b[39m(messages, stream\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m, tools\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m, max_retry\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m5\u001b[39m, model\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mglm-4\u001b[39m\u001b[38;5;124m'\u001b[39m):\n\u001b[0;32m      3\u001b[0m     \u001b[38;5;28;01mfor\u001b[39;00m _ \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(max_retry):\n\u001b[1;32m----> 4\u001b[0m         model_response \u001b[38;5;241m=\u001b[39m \u001b[43mclient\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mchat\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcompletions\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m      5\u001b[0m \u001b[43m                \u001b[49m\u001b[43mmodel\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m      6\u001b[0m \u001b[43m                \u001b[49m\u001b[43mmessages\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mmessages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m      7\u001b[0m \u001b[43m                \u001b[49m\u001b[43mtools\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[43mtools\u001b[49m\n\u001b[0;32m      8\u001b[0m \u001b[43m            \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m      9\u001b[0m         \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m本次消耗tokens: 
\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mmodel_response\u001b[38;5;241m.\u001b[39musage\u001b[38;5;241m.\u001b[39mtotal_tokens\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m     10\u001b[0m         messages\u001b[38;5;241m.\u001b[39mappend(model_response\u001b[38;5;241m.\u001b[39mchoices[\u001b[38;5;241m0\u001b[39m]\u001b[38;5;241m.\u001b[39mmessage\u001b[38;5;241m.\u001b[39mmodel_dump())\n",
      "File \u001b[1;32mc:\\Users\\a8733\\.conda\\envs\\chatglm\\lib\\site-packages\\zhipuai\\api_resource\\chat\\completions.py:48\u001b[0m, in \u001b[0;36mCompletions.create\u001b[1;34m(self, model, request_id, do_sample, stream, temperature, top_p, max_tokens, seed, messages, stop, sensitive_word_check, tools, tool_choice, extra_headers, disable_strict_validation, timeout)\u001b[0m\n\u001b[0;32m     46\u001b[0m     _cast_type \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mobject\u001b[39m\n\u001b[0;32m     47\u001b[0m     _stream_cls \u001b[38;5;241m=\u001b[39m StreamResponse[\u001b[38;5;28mobject\u001b[39m]\n\u001b[1;32m---> 48\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_post\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m     49\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m/chat/completions\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m     50\u001b[0m \u001b[43m    \u001b[49m\u001b[43mbody\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m{\u001b[49m\n\u001b[0;32m     51\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmodel\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     52\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrequest_id\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mrequest_id\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     53\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtemperature\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtemperature\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     54\u001b[0m \u001b[43m        
\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtop_p\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtop_p\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     55\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdo_sample\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mdo_sample\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     56\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmax_tokens\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmax_tokens\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     57\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mseed\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mseed\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     58\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmessages\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mmessages\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     59\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mstop\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     60\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43msensitive_word_check\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43msensitive_word_check\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     61\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mstream\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m 
\u001b[49m\u001b[43mstream\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     62\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtools\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtools\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     63\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtool_choice\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mtool_choice\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     64\u001b[0m \u001b[43m    \u001b[49m\u001b[43m}\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     65\u001b[0m \u001b[43m    \u001b[49m\u001b[43moptions\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmake_user_request_input\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m     66\u001b[0m \u001b[43m        \u001b[49m\u001b[43mextra_headers\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mextra_headers\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     67\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     68\u001b[0m \u001b[43m    \u001b[49m\u001b[43mcast_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_cast_type\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     69\u001b[0m \u001b[43m    \u001b[49m\u001b[43menable_stream\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mFalse\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m     70\u001b[0m \u001b[43m    \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_stream_cls\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m     71\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mc:\\Users\\a8733\\.conda\\envs\\chatglm\\lib\\site-packages\\zhipuai\\core\\_http_client.py:292\u001b[0m, in \u001b[0;36mHttpClient.post\u001b[1;34m(self, path, body, cast_type, options, files, enable_stream, stream_cls)\u001b[0m\n\u001b[0;32m    278\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mpost\u001b[39m(\n\u001b[0;32m    279\u001b[0m         \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    280\u001b[0m         path: \u001b[38;5;28mstr\u001b[39m,\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    287\u001b[0m         stream_cls: \u001b[38;5;28mtype\u001b[39m[StreamResponse[Any]] \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m    288\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m ResponseT \u001b[38;5;241m|\u001b[39m StreamResponse:\n\u001b[0;32m    289\u001b[0m     opts \u001b[38;5;241m=\u001b[39m ClientRequestParam\u001b[38;5;241m.\u001b[39mconstruct(method\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpost\u001b[39m\u001b[38;5;124m\"\u001b[39m, json_data\u001b[38;5;241m=\u001b[39mbody, files\u001b[38;5;241m=\u001b[39mmake_httpx_files(files), url\u001b[38;5;241m=\u001b[39mpath,\n\u001b[0;32m    290\u001b[0m                                         \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39moptions)\n\u001b[1;32m--> 292\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    293\u001b[0m \u001b[43m        \u001b[49m\u001b[43mcast_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcast_type\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mparams\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mopts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    294\u001b[0m \u001b[43m        
\u001b[49m\u001b[43menable_stream\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43menable_stream\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    295\u001b[0m \u001b[43m        \u001b[49m\u001b[43mstream_cls\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstream_cls\u001b[49m\n\u001b[0;32m    296\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mc:\\Users\\a8733\\.conda\\envs\\chatglm\\lib\\site-packages\\zhipuai\\core\\_http_client.py:251\u001b[0m, in \u001b[0;36mHttpClient.request\u001b[1;34m(self, cast_type, params, enable_stream, stream_cls)\u001b[0m\n\u001b[0;32m    249\u001b[0m     err\u001b[38;5;241m.\u001b[39mresponse\u001b[38;5;241m.\u001b[39mread()\n\u001b[0;32m    250\u001b[0m     \u001b[38;5;66;03m# raise err\u001b[39;00m\n\u001b[1;32m--> 251\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_make_status_error(err\u001b[38;5;241m.\u001b[39mresponse) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m    253\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m err:\n\u001b[0;32m    254\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m err\n",
      "\u001b[1;31mAPIInternalError\u001b[0m: Error code: 500, with error text {\"error\":{\"code\":\"500\",\"message\":\"Internal Error\"}}"
     ]
    }
   ],
   "source": [
    "chat_with_glm(\"请问今天上海天气怎么样？\", tools=get_tools(), model='glm-3-turbo')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "chatglm3-demo",
   "language": "python",
   "name": "chatglm3-demo"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
