{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "e76769fc-632e-47bf-8ef3-cd66feaf2bc1",
   "metadata": {},
   "source": [
    "# 1.安装LangChain"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "db3c143b-5228-49c5-8522-ebc5267126bf",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "%pip install langchain"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "4d54658e-af4b-499d-b539-e09c394f61c7",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting langchain-community\n",
      "  Downloading langchain_community-0.3.27-py3-none-any.whl.metadata (2.9 kB)\n",
      "Requirement already satisfied: langchain-core<1.0.0,>=0.3.66 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (0.3.68)\n",
      "Requirement already satisfied: langchain<1.0.0,>=0.3.26 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (0.3.26)\n",
      "Requirement already satisfied: SQLAlchemy<3,>=1.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (2.0.41)\n",
      "Requirement already satisfied: requests<3,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (2.32.4)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (6.0.2)\n",
      "Requirement already satisfied: aiohttp<4.0.0,>=3.8.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (3.12.14)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10,>=8.1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (9.1.2)\n",
      "Collecting dataclasses-json<0.7,>=0.5.7 (from langchain-community)\n",
      "  Downloading dataclasses_json-0.6.7-py3-none-any.whl.metadata (25 kB)\n",
      "Collecting pydantic-settings<3.0.0,>=2.4.0 (from langchain-community)\n",
      "  Downloading pydantic_settings-2.10.1-py3-none-any.whl.metadata (3.4 kB)\n",
      "Requirement already satisfied: langsmith>=0.1.125 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-community) (0.4.5)\n",
      "Collecting httpx-sse<1.0.0,>=0.4.0 (from langchain-community)\n",
      "  Downloading httpx_sse-0.4.1-py3-none-any.whl.metadata (9.4 kB)\n",
      "Collecting numpy>=2.1.0 (from langchain-community)\n",
      "  Downloading numpy-2.3.1-cp313-cp313-win_amd64.whl.metadata (60 kB)\n",
      "Requirement already satisfied: aiohappyeyeballs>=2.5.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (2.6.1)\n",
      "Requirement already satisfied: aiosignal>=1.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (1.4.0)\n",
      "Requirement already satisfied: attrs>=17.3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (24.3.0)\n",
      "Requirement already satisfied: frozenlist>=1.1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (1.7.0)\n",
      "Requirement already satisfied: multidict<7.0,>=4.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (6.6.3)\n",
      "Requirement already satisfied: propcache>=0.2.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (0.3.2)\n",
      "Requirement already satisfied: yarl<2.0,>=1.17.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain-community) (1.20.1)\n",
      "Collecting marshmallow<4.0.0,>=3.18.0 (from dataclasses-json<0.7,>=0.5.7->langchain-community)\n",
      "  Downloading marshmallow-3.26.1-py3-none-any.whl.metadata (7.3 kB)\n",
      "Collecting typing-inspect<1,>=0.4.0 (from dataclasses-json<0.7,>=0.5.7->langchain-community)\n",
      "  Downloading typing_inspect-0.9.0-py3-none-any.whl.metadata (1.5 kB)\n",
      "Requirement already satisfied: langchain-text-splitters<1.0.0,>=0.3.8 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<1.0.0,>=0.3.26->langchain-community) (0.3.8)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.7.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<1.0.0,>=0.3.26->langchain-community) (2.11.7)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain-community) (1.33)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain-community) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain-community) (4.12.2)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.66->langchain-community) (3.0.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<1.0.0,>=0.3.26->langchain-community) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.33.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<1.0.0,>=0.3.26->langchain-community) (2.33.2)\n",
      "Requirement already satisfied: typing-inspection>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<1.0.0,>=0.3.26->langchain-community) (0.4.1)\n",
      "Requirement already satisfied: python-dotenv>=0.21.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic-settings<3.0.0,>=2.4.0->langchain-community) (1.1.1)\n",
      "Requirement already satisfied: charset_normalizer<4,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain-community) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain-community) (3.7)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain-community) (2.5.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain-community) (2025.6.15)\n",
      "Requirement already satisfied: greenlet>=1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from SQLAlchemy<3,>=1.4->langchain-community) (3.2.3)\n",
      "Collecting mypy-extensions>=0.3.0 (from typing-inspect<1,>=0.4.0->dataclasses-json<0.7,>=0.5.7->langchain-community)\n",
      "  Downloading mypy_extensions-1.1.0-py3-none-any.whl.metadata (1.1 kB)\n",
      "Requirement already satisfied: httpx<1,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain-community) (0.28.1)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain-community) (3.10.18)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain-community) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain-community) (0.23.0)\n",
      "Requirement already satisfied: anyio in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.1.125->langchain-community) (4.7.0)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.1.125->langchain-community) (1.0.9)\n",
      "Requirement already satisfied: h11>=0.16 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith>=0.1.125->langchain-community) (0.16.0)\n",
      "Requirement already satisfied: sniffio>=1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from anyio->httpx<1,>=0.23.0->langsmith>=0.1.125->langchain-community) (1.3.0)\n",
      "Downloading langchain_community-0.3.27-py3-none-any.whl (2.5 MB)\n",
      "   ---------------------------------------- 0.0/2.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/2.5 MB ? eta -:--:--\n",
      "   -------- ------------------------------- 0.5/2.5 MB 1.9 MB/s eta 0:00:02\n",
      "   ------------ --------------------------- 0.8/2.5 MB 1.5 MB/s eta 0:00:02\n",
      "   ---------------- ----------------------- 1.0/2.5 MB 1.4 MB/s eta 0:00:02\n",
      "   -------------------- ------------------- 1.3/2.5 MB 1.4 MB/s eta 0:00:01\n",
      "   ------------------------ --------------- 1.6/2.5 MB 1.3 MB/s eta 0:00:01\n",
      "   ----------------------------- ---------- 1.8/2.5 MB 1.3 MB/s eta 0:00:01\n",
      "   --------------------------------- ------ 2.1/2.5 MB 1.3 MB/s eta 0:00:01\n",
      "   ------------------------------------- -- 2.4/2.5 MB 1.3 MB/s eta 0:00:01\n",
      "   ------------------------------------- -- 2.4/2.5 MB 1.3 MB/s eta 0:00:01\n",
      "   ------------------------------------- -- 2.4/2.5 MB 1.3 MB/s eta 0:00:01\n",
      "   ---------------------------------------- 2.5/2.5 MB 1.1 MB/s eta 0:00:00\n",
      "Downloading dataclasses_json-0.6.7-py3-none-any.whl (28 kB)\n",
      "Downloading httpx_sse-0.4.1-py3-none-any.whl (8.1 kB)\n",
      "Downloading marshmallow-3.26.1-py3-none-any.whl (50 kB)\n",
      "Downloading pydantic_settings-2.10.1-py3-none-any.whl (45 kB)\n",
      "Downloading typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)\n",
      "Downloading mypy_extensions-1.1.0-py3-none-any.whl (5.0 kB)\n",
      "Downloading numpy-2.3.1-cp313-cp313-win_amd64.whl (12.7 MB)\n",
      "   ---------------------------------------- 0.0/12.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/12.7 MB ? eta -:--:--\n",
      "    --------------------------------------- 0.3/12.7 MB ? eta -:--:--\n",
      "   - -------------------------------------- 0.5/12.7 MB 969.1 kB/s eta 0:00:13\n",
      "   - -------------------------------------- 0.5/12.7 MB 969.1 kB/s eta 0:00:13\n",
      "   - -------------------------------------- 0.5/12.7 MB 969.1 kB/s eta 0:00:13\n",
      "   -- ------------------------------------- 0.8/12.7 MB 693.8 kB/s eta 0:00:18\n",
      "   --- ------------------------------------ 1.0/12.7 MB 736.3 kB/s eta 0:00:16\n",
      "   --- ------------------------------------ 1.0/12.7 MB 736.3 kB/s eta 0:00:16\n",
      "   ---- ----------------------------------- 1.3/12.7 MB 704.9 kB/s eta 0:00:17\n",
      "   ---- ----------------------------------- 1.3/12.7 MB 704.9 kB/s eta 0:00:17\n",
      "   ---- ----------------------------------- 1.6/12.7 MB 713.3 kB/s eta 0:00:16\n",
      "   ----- ---------------------------------- 1.8/12.7 MB 703.2 kB/s eta 0:00:16\n",
      "   ----- ---------------------------------- 1.8/12.7 MB 703.2 kB/s eta 0:00:16\n",
      "   ----- ---------------------------------- 1.8/12.7 MB 703.2 kB/s eta 0:00:16\n",
      "   ------ --------------------------------- 2.1/12.7 MB 671.8 kB/s eta 0:00:16\n",
      "   ------ --------------------------------- 2.1/12.7 MB 671.8 kB/s eta 0:00:16\n",
      "   ------- -------------------------------- 2.4/12.7 MB 663.8 kB/s eta 0:00:16\n",
      "   ------- -------------------------------- 2.4/12.7 MB 663.8 kB/s eta 0:00:16\n",
      "   -------- ------------------------------- 2.6/12.7 MB 657.1 kB/s eta 0:00:16\n",
      "   --------- ------------------------------ 2.9/12.7 MB 662.9 kB/s eta 0:00:15\n",
      "   --------- ------------------------------ 2.9/12.7 MB 662.9 kB/s eta 0:00:15\n",
      "   --------- ------------------------------ 3.1/12.7 MB 674.0 kB/s eta 0:00:15\n",
      "   --------- ------------------------------ 3.1/12.7 MB 674.0 kB/s eta 0:00:15\n",
      "   ---------- ----------------------------- 3.4/12.7 MB 665.3 kB/s eta 0:00:15\n",
      "   ---------- ----------------------------- 3.4/12.7 MB 665.3 kB/s eta 0:00:15\n",
      "   ----------- ---------------------------- 3.7/12.7 MB 650.9 kB/s eta 0:00:14\n",
      "   ----------- ---------------------------- 3.7/12.7 MB 650.9 kB/s eta 0:00:14\n",
      "   ------------ --------------------------- 3.9/12.7 MB 667.0 kB/s eta 0:00:14\n",
      "   ------------- -------------------------- 4.2/12.7 MB 675.5 kB/s eta 0:00:13\n",
      "   -------------- ------------------------- 4.5/12.7 MB 689.2 kB/s eta 0:00:13\n",
      "   -------------- ------------------------- 4.5/12.7 MB 689.2 kB/s eta 0:00:13\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   -------------- ------------------------- 4.7/12.7 MB 695.7 kB/s eta 0:00:12\n",
      "   --------------- ------------------------ 5.0/12.7 MB 558.2 kB/s eta 0:00:14\n",
      "   ---------------- ----------------------- 5.2/12.7 MB 568.1 kB/s eta 0:00:14\n",
      "   ---------------- ----------------------- 5.2/12.7 MB 568.1 kB/s eta 0:00:14\n",
      "   ----------------- ---------------------- 5.5/12.7 MB 576.3 kB/s eta 0:00:13\n",
      "   ------------------ --------------------- 5.8/12.7 MB 580.3 kB/s eta 0:00:13\n",
      "   ------------------ --------------------- 5.8/12.7 MB 580.3 kB/s eta 0:00:13\n",
      "   ------------------ --------------------- 6.0/12.7 MB 586.4 kB/s eta 0:00:12\n",
      "   ------------------ --------------------- 6.0/12.7 MB 586.4 kB/s eta 0:00:12\n",
      "   ------------------- -------------------- 6.3/12.7 MB 590.9 kB/s eta 0:00:11\n",
      "   ------------------- -------------------- 6.3/12.7 MB 590.9 kB/s eta 0:00:11\n",
      "   -------------------- ------------------- 6.6/12.7 MB 593.9 kB/s eta 0:00:11\n",
      "   --------------------- ------------------ 6.8/12.7 MB 595.7 kB/s eta 0:00:10\n",
      "   --------------------- ------------------ 6.8/12.7 MB 595.7 kB/s eta 0:00:10\n",
      "   --------------------- ------------------ 6.8/12.7 MB 595.7 kB/s eta 0:00:10\n",
      "   ---------------------- ----------------- 7.1/12.7 MB 590.7 kB/s eta 0:00:10\n",
      "   ---------------------- ----------------- 7.1/12.7 MB 590.7 kB/s eta 0:00:10\n",
      "   ---------------------- ----------------- 7.1/12.7 MB 590.7 kB/s eta 0:00:10\n",
      "   ----------------------- ---------------- 7.3/12.7 MB 585.7 kB/s eta 0:00:10\n",
      "   ----------------------- ---------------- 7.6/12.7 MB 591.0 kB/s eta 0:00:09\n",
      "   ----------------------- ---------------- 7.6/12.7 MB 591.0 kB/s eta 0:00:09\n",
      "   ------------------------ --------------- 7.9/12.7 MB 589.8 kB/s eta 0:00:09\n",
      "   ------------------------ --------------- 7.9/12.7 MB 589.8 kB/s eta 0:00:09\n",
      "   ------------------------- -------------- 8.1/12.7 MB 591.3 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 8.1/12.7 MB 591.3 kB/s eta 0:00:08\n",
      "   -------------------------- ------------- 8.4/12.7 MB 591.7 kB/s eta 0:00:08\n",
      "   -------------------------- ------------- 8.4/12.7 MB 591.7 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 8.7/12.7 MB 592.0 kB/s eta 0:00:07\n",
      "   --------------------------- ------------ 8.7/12.7 MB 592.0 kB/s eta 0:00:07\n",
      "   ---------------------------- ----------- 8.9/12.7 MB 598.3 kB/s eta 0:00:07\n",
      "   ---------------------------- ----------- 9.2/12.7 MB 602.4 kB/s eta 0:00:06\n",
      "   ---------------------------- ----------- 9.2/12.7 MB 602.4 kB/s eta 0:00:06\n",
      "   ----------------------------- ---------- 9.4/12.7 MB 608.2 kB/s eta 0:00:06\n",
      "   ------------------------------ --------- 9.7/12.7 MB 610.1 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 9.7/12.7 MB 610.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 10.0/12.7 MB 613.9 kB/s eta 0:00:05\n",
      "   -------------------------------- ------- 10.2/12.7 MB 618.0 kB/s eta 0:00:05\n",
      "   -------------------------------- ------- 10.2/12.7 MB 618.0 kB/s eta 0:00:05\n",
      "   -------------------------------- ------- 10.5/12.7 MB 624.4 kB/s eta 0:00:04\n",
      "   --------------------------------- ------ 10.7/12.7 MB 628.2 kB/s eta 0:00:04\n",
      "   --------------------------------- ------ 10.7/12.7 MB 628.2 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 11.0/12.7 MB 629.8 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 11.3/12.7 MB 633.8 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 11.3/12.7 MB 633.8 kB/s eta 0:00:03\n",
      "   ------------------------------------ --- 11.5/12.7 MB 638.9 kB/s eta 0:00:02\n",
      "   ------------------------------------- -- 11.8/12.7 MB 642.1 kB/s eta 0:00:02\n",
      "   ------------------------------------- -- 11.8/12.7 MB 642.1 kB/s eta 0:00:02\n",
      "   ------------------------------------- -- 12.1/12.7 MB 643.0 kB/s eta 0:00:02\n",
      "   -------------------------------------- - 12.3/12.7 MB 644.7 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 12.3/12.7 MB 644.7 kB/s eta 0:00:01\n",
      "   ---------------------------------------  12.6/12.7 MB 646.7 kB/s eta 0:00:01\n",
      "   ---------------------------------------- 12.7/12.7 MB 646.4 kB/s eta 0:00:00\n",
      "Installing collected packages: numpy, mypy-extensions, marshmallow, httpx-sse, typing-inspect, pydantic-settings, dataclasses-json, langchain-community\n",
      "\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------------------------------------- 0/8 [numpy]\n",
      "   ---------- ----------------------------- 2/8 [marshmallow]\n",
      "   -------------------- ------------------- 4/8 [typing-inspect]\n",
      "   ------------------------- -------------- 5/8 [pydantic-settings]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ----------------------------------- ---- 7/8 [langchain-community]\n",
      "   ---------------------------------------- 8/8 [langchain-community]\n",
      "\n",
      "Successfully installed dataclasses-json-0.6.7 httpx-sse-0.4.1 langchain-community-0.3.27 marshmallow-3.26.1 mypy-extensions-1.1.0 numpy-2.3.1 pydantic-settings-2.10.1 typing-inspect-0.9.0\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "%pip install langchain-community"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "e89577de-c021-4a48-b864-ce9f207ffa1a",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Name: langchain\n",
      "Version: 0.3.26\n",
      "Summary: Building applications with LLMs through composability\n",
      "Home-page: \n",
      "Author: \n",
      "Author-email: \n",
      "License: MIT\n",
      "Location: D:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\n",
      "Requires: langchain-core, langchain-text-splitters, langsmith, pydantic, PyYAML, requests, SQLAlchemy\n",
      "Required-by: langchain-community\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "%pip show langchain"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b497867e-6e9f-44d6-9169-a706a4f1be64",
   "metadata": {},
   "source": [
    "# 2.配置DeepSeek_API_KEY"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "887952c1-0348-4b9b-a4f3-5b54bf1b7eb1",
   "metadata": {},
   "source": [
    "### 2.1安装python-dotenv库读取环境变量"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "6540f5f2-515b-4bcc-a97d-fb7a425f9d7c",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: python-dotenv in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (1.1.1)\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "%pip install python-dotenv"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "df4dbeeb-795b-46fc-bb81-af13e4ea6390",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Name: python-dotenv\n",
      "Version: 1.1.1\n",
      "Summary: Read key-value pairs from a .env file and set them as environment variables\n",
      "Home-page: https://github.com/theskumar/python-dotenv\n",
      "Author: Saurabh Kumar\n",
      "Author-email: me+github@saurabh-kumar.com\n",
      "License: BSD-3-Clause\n",
      "Location: D:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\n",
      "Requires: \n",
      "Required-by: pydantic-settings\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "%pip show python-dotenv"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 65,
   "id": "40b87c8e-e58f-4185-a623-fb82962d1f06",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
       "sk-********************************\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "from dotenv import load_dotenv\n",
    "load_dotenv(override=True)\n",
    "\n",
    "DEEPSEEK_API_KEY = os.getenv(\"DEEPSEEK_API_KEY\")\n",
     "print(\"DEEPSEEK_API_KEY loaded:\", DEEPSEEK_API_KEY is not None)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c6f7206c-a783-4dda-b69e-6488d49c1b0c",
   "metadata": {},
   "source": [
    "### 2.2测试DEEPSEEK API网络连通性"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "262bfb0b-aabe-47e3-8518-fa6744d1ef67",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting openai\n",
      "  Downloading openai-1.95.1-py3-none-any.whl.metadata (29 kB)\n",
      "Requirement already satisfied: anyio<5,>=3.5.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai) (4.7.0)\n",
      "Collecting distro<2,>=1.7.0 (from openai)\n",
      "  Downloading distro-1.9.0-py3-none-any.whl.metadata (6.8 kB)\n",
      "Requirement already satisfied: httpx<1,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai) (0.28.1)\n",
      "Collecting jiter<1,>=0.4.0 (from openai)\n",
      "  Downloading jiter-0.10.0-cp313-cp313-win_amd64.whl.metadata (5.3 kB)\n",
      "Requirement already satisfied: pydantic<3,>=1.9.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai) (2.11.7)\n",
      "Requirement already satisfied: sniffio in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai) (1.3.0)\n",
      "Collecting tqdm>4 (from openai)\n",
      "  Downloading tqdm-4.67.1-py3-none-any.whl.metadata (57 kB)\n",
      "Requirement already satisfied: typing-extensions<5,>=4.11 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai) (4.12.2)\n",
      "Requirement already satisfied: idna>=2.8 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from anyio<5,>=3.5.0->openai) (3.7)\n",
      "Requirement already satisfied: certifi in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->openai) (2025.6.15)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->openai) (1.0.9)\n",
      "Requirement already satisfied: h11>=0.16 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai) (0.16.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3,>=1.9.0->openai) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.33.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3,>=1.9.0->openai) (2.33.2)\n",
      "Requirement already satisfied: typing-inspection>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3,>=1.9.0->openai) (0.4.1)\n",
      "Requirement already satisfied: colorama in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from tqdm>4->openai) (0.4.6)\n",
      "Downloading openai-1.95.1-py3-none-any.whl (755 kB)\n",
      "   ---------------------------------------- 0.0/755.6 kB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/755.6 kB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/755.6 kB ? eta -:--:--\n",
      "   ------------- -------------------------- 262.1/755.6 kB ? eta -:--:--\n",
      "   --------------------------- ------------ 524.3/755.6 kB 1.3 MB/s eta 0:00:01\n",
      "   --------------------------- ------------ 524.3/755.6 kB 1.3 MB/s eta 0:00:01\n",
      "   -------------------------------------- 755.6/755.6 kB 681.9 kB/s eta 0:00:00\n",
      "Downloading distro-1.9.0-py3-none-any.whl (20 kB)\n",
      "Downloading jiter-0.10.0-cp313-cp313-win_amd64.whl (205 kB)\n",
      "Downloading tqdm-4.67.1-py3-none-any.whl (78 kB)\n",
      "Installing collected packages: tqdm, jiter, distro, openai\n",
      "\n",
      "   ---------------------------------------- 0/4 [tqdm]\n",
      "   ---------------------------------------- 0/4 [tqdm]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ------------------------------ --------- 3/4 [openai]\n",
      "   ---------------------------------------- 4/4 [openai]\n",
      "\n",
      "Successfully installed distro-1.9.0 jiter-0.10.0 openai-1.95.1 tqdm-4.67.1\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "%pip install openai"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "ae810cbc-f91e-431a-a83f-bb9888193fe6",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "你好！我是 **DeepSeek Chat**，由深度求索公司（DeepSeek）研发的智能 AI 助手。我的最新版本是 **DeepSeek-V3**，知识截止到 **2024年7月**，拥有 **128K 上下文记忆**，可以处理复杂的长文本对话，并支持文件上传（如 PDF、Word、Excel 等）进行内容分析。  \n",
      "\n",
      "### ✨ **我的特点**  \n",
      "✅ **免费使用**：目前不收费，随时为你解答问题！  \n",
      "✅ **超长上下文**：可记住长达 128K 的对话内容，适合处理大段文本或复杂任务。  \n",
      "✅ **文件阅读**：支持上传文档，帮你总结、提取信息或分析内容。  \n",
      "✅ **知识丰富**：覆盖科技、编程、学术、生活等多个领域，能提供最新、最准确的解答。  \n",
      "✅ **多语言支持**：可以用中文、英文等多种语言交流。  \n",
      "\n",
      "### 🚀 **我能帮你做什么？**  \n",
      "- 📚 **学习 & 研究**：解答学术问题、论文写作、代码调试等。  \n",
      "- 💼 **工作助手**：撰写邮件、优化简历、分析数据等。  \n",
      "- 🏡 **生活百科**：旅行建议、美食推荐、健康小贴士等。  \n",
      "- 💡 **创意灵感**：写故事、生成文案、头脑风暴等。  \n",
      "\n",
      "如果你有任何问题，尽管问我！😊 你今天想聊些什么呢？\n"
     ]
    }
   ],
   "source": [
    "from openai import OpenAI\n",
    "\n",
    "# 初始化DEEPSEEK的API客户端\n",
    "client = OpenAI(api_key=DEEPSEEK_API_KEY,base_url=\"https://api.deepseek.com\")\n",
    "\n",
    "# 调用DeepSeeK的API，生成回答\n",
    "response = client.chat.completions.create(\n",
    "    model=\"deepseek-chat\",\n",
    "    messages=[\n",
    "        {\"role\":\"user\",\"content\":\"你好，介绍一下你自己\"}\n",
    "    ],\n",
    ")\n",
    "\n",
    "# 输出结果\n",
    "print(response.choices[0].message.content)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "0498add6-b604-4e52-9dd3-14c4106f24bd",
   "metadata": {},
   "source": [
    "# 3.DeepSeek接入Langchain"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "4650d57a-c8a8-4682-ae0a-4f498f3eaccb",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true,
     "source_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting langchain-deepseek\n",
      "  Downloading langchain_deepseek-0.1.3-py3-none-any.whl.metadata (1.1 kB)\n",
      "Requirement already satisfied: langchain-core<1.0.0,>=0.3.47 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-deepseek) (0.3.68)\n",
      "Collecting langchain-openai<1.0.0,>=0.3.9 (from langchain-deepseek)\n",
      "  Downloading langchain_openai-0.3.28-py3-none-any.whl.metadata (2.3 kB)\n",
      "Requirement already satisfied: langsmith>=0.3.45 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (0.4.5)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (9.1.2)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (1.33)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (6.0.2)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (4.12.2)\n",
      "Requirement already satisfied: pydantic>=2.7.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (2.11.7)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (3.0.0)\n",
      "Requirement already satisfied: openai<2.0.0,>=1.86.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (1.95.1)\n",
      "Collecting tiktoken<1,>=0.7 (from langchain-openai<1.0.0,>=0.3.9->langchain-deepseek)\n",
      "  Downloading tiktoken-0.9.0-cp313-cp313-win_amd64.whl.metadata (6.8 kB)\n",
      "Requirement already satisfied: anyio<5,>=3.5.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (4.7.0)\n",
      "Requirement already satisfied: distro<2,>=1.7.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (1.9.0)\n",
      "Requirement already satisfied: httpx<1,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (0.28.1)\n",
      "Requirement already satisfied: jiter<1,>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (0.10.0)\n",
      "Requirement already satisfied: sniffio in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (1.3.0)\n",
      "Requirement already satisfied: tqdm>4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (4.67.1)\n",
      "Requirement already satisfied: idna>=2.8 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from anyio<5,>=3.5.0->openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (3.7)\n",
      "Requirement already satisfied: certifi in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (2025.6.15)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (1.0.9)\n",
      "Requirement already satisfied: h11>=0.16 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (0.16.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.33.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (2.33.2)\n",
      "Requirement already satisfied: typing-inspection>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic>=2.7.4->langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (0.4.1)\n",
      "Collecting regex>=2022.1.18 (from tiktoken<1,>=0.7->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek)\n",
      "  Downloading regex-2024.11.6-cp313-cp313-win_amd64.whl.metadata (41 kB)\n",
      "Requirement already satisfied: requests>=2.26.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from tiktoken<1,>=0.7->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (2.32.4)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.3.45->langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (3.10.18)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.3.45->langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.3.45->langchain-core<1.0.0,>=0.3.47->langchain-deepseek) (0.23.0)\n",
      "Requirement already satisfied: charset_normalizer<4,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests>=2.26.0->tiktoken<1,>=0.7->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (3.3.2)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests>=2.26.0->tiktoken<1,>=0.7->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (2.5.0)\n",
      "Requirement already satisfied: colorama in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from tqdm>4->openai<2.0.0,>=1.86.0->langchain-openai<1.0.0,>=0.3.9->langchain-deepseek) (0.4.6)\n",
      "Downloading langchain_deepseek-0.1.3-py3-none-any.whl (7.1 kB)\n",
      "Downloading langchain_openai-0.3.28-py3-none-any.whl (70 kB)\n",
      "Downloading tiktoken-0.9.0-cp313-cp313-win_amd64.whl (894 kB)\n",
      "   ---------------------------------------- 0.0/894.7 kB ? eta -:--:--\n",
      "   ----------- ---------------------------- 262.1/894.7 kB ? eta -:--:--\n",
      "   ---------------------------------------- 894.7/894.7 kB 3.1 MB/s eta 0:00:00\n",
      "Downloading regex-2024.11.6-cp313-cp313-win_amd64.whl (273 kB)\n",
      "Installing collected packages: regex, tiktoken, langchain-openai, langchain-deepseek\n",
      "\n",
      "   ---------------------------------------- 0/4 [regex]\n",
      "   -------------------- ------------------- 2/4 [langchain-openai]\n",
      "   -------------------- ------------------- 2/4 [langchain-openai]\n",
      "   ---------------------------------------- 4/4 [langchain-deepseek]\n",
      "\n",
      "Successfully installed langchain-deepseek-0.1.3 langchain-openai-0.3.28 regex-2024.11.6 tiktoken-0.9.0\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "%pip install langchain-deepseek"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "436a11ec-dd70-40bd-8e2d-9630b7791953",
   "metadata": {},
   "source": [
    "> 安装好`LangChain`集成`DeepSeek`模型的依赖包后，需要通过`init_chat_model`函数来初始化大模型，代码如下。\n",
    ">\n",
    "> 简单交互：V3 的 model 是 `deepseek-chat`，R1 的 model 是 `deepseek-reasoner`。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 68,
   "id": "49a2353a-d73f-41f4-af80-4040807b41bb",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "**Radiohead**（电台司令）是一支来自英国牛津的传奇摇滚乐队，成立于1985年。他们以实验性的音乐风格、深刻的歌词和前卫的电子元素闻名，被认为是当代最具影响力的乐队之一。\n",
      "\n",
      "### 核心成员：\n",
      "- **Thom Yorke**（主唱、吉他、钢琴）\n",
      "- **Jonny Greenwood**（主音吉他、键盘、合成器）\n",
      "- **Ed O'Brien**（吉他、和声）\n",
      "- **Colin Greenwood**（贝斯）\n",
      "- **Philip Selway**（鼓手）\n",
      "\n",
      "### 音乐风格：\n",
      "早期受另类摇滚（如《Pablo Honey》中的经典曲目《Creep》）和后朋克影响，后逐渐转向实验摇滚、电子乐和氛围音乐，融合爵士、古典等元素，风格多变且难以归类。\n",
      "\n",
      "### 关键专辑：\n",
      "1. **《OK Computer》**（1997）  \n",
      "   被誉为首张“21世纪摇滚专辑”，探讨科技异化与现代社会焦虑，如《Paranoid Android》《Karma Police》。\n",
      "2. **《Kid A》**（2000）  \n",
      "   彻底颠覆传统摇滚，引入电子、氛围音效，标志乐队转型巅峰（如《Everything in Its Right Place》）。\n",
      "3. **《In Rainbows》**（2007）  \n",
      "   以“自助定价”数字发行轰动乐坛，音乐兼具情感与实验性（《Reckoner》《Weird Fishes》）。\n",
      "\n",
      "### 其他成就：\n",
      "- **现场演出**：以沉浸式灯光和音效著称，注重艺术表达。\n",
      "- **社会议题**：关注环保、政治，Thom Yorke多次参与气候变化活动。\n",
      "- **奖项**：多次获格莱美奖，入选摇滚名人堂（2019）。\n",
      "\n",
      "### 影响：\n",
      "启发Muse、The 1975等众多乐队，持续推动主流与独立音乐的边界。近年作品如《A Moon Shaped Pool》（2016）延续忧郁与实验风格。\n",
      "\n",
      "Radiohead以拒绝重复、挑战听众的音乐哲学，成为当代文化的标志性声音。\n"
     ]
    }
   ],
   "source": [
    "from langchain.chat_models import init_chat_model\n",
    "\n",
    "model = init_chat_model(model=\"deepseek-chat\",model_provider=\"deepseek\",api_key=DEEPSEEK_API_KEY)\n",
    "question = \"简单介绍一下Radiohead\"\n",
    "\n",
    "result = model.invoke(question)\n",
    "print(result.content)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1453f2c2-23e6-41a0-b732-bfd17ac3f8ed",
   "metadata": {},
   "source": [
    "# 4.集成DASHSCOPE到LangChain"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "4c89a0ef-f316-461f-8be5-aeb34922524b",
   "metadata": {},
   "source": [
    "### 4.1OPENAI-API示例"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "52da6bca-6555-4dbf-b657-e6ce6d08476a",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{\"id\":\"chatcmpl-a9071a69-d6cf-9cad-b0a2-5a3018ed4613\",\"choices\":[{\"finish_reason\":\"stop\",\"index\":0,\"logprobs\":null,\"message\":{\"content\":\"你好！我是Qwen，是阿里巴巴集团旗下的通义实验室自主研发的超大规模语言模型。我可以帮助你回答问题、创作文字，比如写故事、写公文、写邮件、写剧本、逻辑推理、编程等等，还能表达观点，玩游戏等。我支持多种语言，包括但不限于中文、英文、德语、法语、西班牙语等。如果你有任何问题或需要帮助，随时告诉我！\",\"refusal\":null,\"role\":\"assistant\",\"annotations\":null,\"audio\":null,\"function_call\":null,\"tool_calls\":null}}],\"created\":1752861733,\"model\":\"qwen-plus\",\"object\":\"chat.completion\",\"service_tier\":null,\"system_fingerprint\":null,\"usage\":{\"completion_tokens\":87,\"prompt_tokens\":16,\"total_tokens\":103,\"completion_tokens_details\":null,\"prompt_tokens_details\":{\"audio_tokens\":null,\"cached_tokens\":0}}}\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "from openai import OpenAI\n",
    "\n",
    "client = OpenAI(\n",
    "    api_key=os.getenv(\"DASHSCOPE_API_KEY\"),\n",
    "    base_url=\"https://dashscope.aliyuncs.com/compatible-mode/v1\"\n",
    ")\n",
    "\n",
    "completion = client.chat.completions.create(\n",
    "    model=\"qwen-plus\",\n",
    "    messages=[\n",
    "        {\"role\":\"user\",\"content\":\"你好，介绍一下你自己\"}\n",
    "    ]\n",
    ")\n",
    "\n",
    "print(completion.model_dump_json())"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "91cc4de1-2110-45fe-8bb4-dcecd9a77b1b",
   "metadata": {
    "jp-MarkdownHeadingCollapsed": true
   },
   "source": [
    "### 4.2安装 dashscope 的 python SDK包"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "c020c0d3-fd39-4c02-b65e-ae2985eb4847",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true,
     "source_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting dashscope\n",
      "  Downloading dashscope-1.23.8-py3-none-any.whl.metadata (7.1 kB)\n",
      "Collecting aiohttp (from dashscope)\n",
      "  Downloading aiohttp-3.12.14-cp313-cp313-win_amd64.whl.metadata (7.9 kB)\n",
      "Requirement already satisfied: requests in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from dashscope) (2.32.4)\n",
      "Requirement already satisfied: websocket-client in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from dashscope) (1.8.0)\n",
      "Collecting cryptography (from dashscope)\n",
      "  Downloading cryptography-45.0.5-cp311-abi3-win_amd64.whl.metadata (5.7 kB)\n",
      "Collecting aiohappyeyeballs>=2.5.0 (from aiohttp->dashscope)\n",
      "  Downloading aiohappyeyeballs-2.6.1-py3-none-any.whl.metadata (5.9 kB)\n",
      "Collecting aiosignal>=1.4.0 (from aiohttp->dashscope)\n",
      "  Downloading aiosignal-1.4.0-py3-none-any.whl.metadata (3.7 kB)\n",
      "Requirement already satisfied: attrs>=17.3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp->dashscope) (24.3.0)\n",
      "Collecting frozenlist>=1.1.1 (from aiohttp->dashscope)\n",
      "  Downloading frozenlist-1.7.0-cp313-cp313-win_amd64.whl.metadata (19 kB)\n",
      "Collecting multidict<7.0,>=4.5 (from aiohttp->dashscope)\n",
      "  Downloading multidict-6.6.3-cp313-cp313-win_amd64.whl.metadata (5.4 kB)\n",
      "Collecting propcache>=0.2.0 (from aiohttp->dashscope)\n",
      "  Downloading propcache-0.3.2-cp313-cp313-win_amd64.whl.metadata (12 kB)\n",
      "Collecting yarl<2.0,>=1.17.0 (from aiohttp->dashscope)\n",
      "  Downloading yarl-1.20.1-cp313-cp313-win_amd64.whl.metadata (76 kB)\n",
      "Requirement already satisfied: idna>=2.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from yarl<2.0,>=1.17.0->aiohttp->dashscope) (3.7)\n",
      "Requirement already satisfied: cffi>=1.14 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from cryptography->dashscope) (1.17.1)\n",
      "Requirement already satisfied: pycparser in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from cffi>=1.14->cryptography->dashscope) (2.21)\n",
      "Requirement already satisfied: charset_normalizer<4,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests->dashscope) (3.3.2)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests->dashscope) (2.5.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests->dashscope) (2025.6.15)\n",
      "Downloading dashscope-1.23.8-py3-none-any.whl (1.3 MB)\n",
      "   ---------------------------------------- 0.0/1.3 MB ? eta -:--:--\n",
      "   ---------------------------------------- 1.3/1.3 MB 6.8 MB/s eta 0:00:00\n",
      "Downloading aiohttp-3.12.14-cp313-cp313-win_amd64.whl (448 kB)\n",
      "Downloading multidict-6.6.3-cp313-cp313-win_amd64.whl (45 kB)\n",
      "Downloading yarl-1.20.1-cp313-cp313-win_amd64.whl (86 kB)\n",
      "Downloading aiohappyeyeballs-2.6.1-py3-none-any.whl (15 kB)\n",
      "Downloading aiosignal-1.4.0-py3-none-any.whl (7.5 kB)\n",
      "Downloading frozenlist-1.7.0-cp313-cp313-win_amd64.whl (43 kB)\n",
      "Downloading propcache-0.3.2-cp313-cp313-win_amd64.whl (40 kB)\n",
      "Downloading cryptography-45.0.5-cp311-abi3-win_amd64.whl (3.4 MB)\n",
      "   ---------------------------------------- 0.0/3.4 MB ? eta -:--:--\n",
      "   ------------------------ --------------- 2.1/3.4 MB 11.1 MB/s eta 0:00:01\n",
      "   ---------------------------------------- 3.4/3.4 MB 8.9 MB/s eta 0:00:00\n",
      "Installing collected packages: propcache, multidict, frozenlist, aiohappyeyeballs, yarl, cryptography, aiosignal, aiohttp, dashscope\n",
      "\n",
      "   -------- ------------------------------- 2/9 [frozenlist]\n",
      "   ----------------- ---------------------- 4/9 [yarl]\n",
      "   ---------------------- ----------------- 5/9 [cryptography]\n",
      "   ---------------------- ----------------- 5/9 [cryptography]\n",
      "   ---------------------- ----------------- 5/9 [cryptography]\n",
      "   ---------------------- ----------------- 5/9 [cryptography]\n",
      "   ---------------------- ----------------- 5/9 [cryptography]\n",
      "   ------------------------------- -------- 7/9 [aiohttp]\n",
      "   ------------------------------- -------- 7/9 [aiohttp]\n",
      "   ------------------------------- -------- 7/9 [aiohttp]\n",
      "   ------------------------------- -------- 7/9 [aiohttp]\n",
      "   ------------------------------- -------- 7/9 [aiohttp]\n",
      "   ----------------------------------- ---- 8/9 [dashscope]\n",
      "   ----------------------------------- ---- 8/9 [dashscope]\n",
      "   ----------------------------------- ---- 8/9 [dashscope]\n",
      "   ----------------------------------- ---- 8/9 [dashscope]\n",
      "   ----------------------------------- ---- 8/9 [dashscope]\n",
      "   ----------------------------------- ---- 8/9 [dashscope]\n",
      "   ---------------------------------------- 9/9 [dashscope]\n",
      "\n",
      "Successfully installed aiohappyeyeballs-2.6.1 aiohttp-3.12.14 aiosignal-1.4.0 cryptography-45.0.5 dashscope-1.23.8 frozenlist-1.7.0 multidict-6.6.3 propcache-0.3.2 yarl-1.20.1\n"
     ]
    }
   ],
   "source": [
    "%pip install --upgrade dashscope"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "983c2883-511b-4a32-b988-c6febfbbde77",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true,
     "source_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Name: dashscope\n",
      "Version: 1.23.8\n",
      "Summary: dashscope client sdk library\n",
      "Home-page: https://dashscope.aliyun.com/\n",
      "Author: Alibaba Cloud\n",
      "Author-email: dashscope@alibabacloud.com\n",
      "License: Apache 2.0\n",
      "Location: D:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\n",
      "Requires: aiohttp, cryptography, requests, websocket-client\n",
      "Required-by: \n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "%pip show dashscope"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "37e9cde6-1819-4d34-b146-199fe891b2cd",
   "metadata": {
    "jp-MarkdownHeadingCollapsed": true
   },
   "source": [
    "### 4.3初始化使用`ChatTongyi`而不是`init_chat_model`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "a74812c0-d9c7-4595-b398-47caac237d96",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "你好！Radiohead 是一支来自英国的著名摇滚乐队，成立于1985年，最初名为 On a Friday，在1990年代末期更名为 Radiohead。他们被认为是21世纪最具创新性和影响力的乐队之一，以其独特的音乐风格、深刻的歌词和对音乐技术的探索而闻名。\n",
      "\n",
      "### 乐队成员：\n",
      "- **Thom Yorke**（主唱、键盘、吉他）\n",
      "- **Jonny Greenwood**（吉他、键盘、小提琴）\n",
      "- **Colin Greenwood**（贝斯）\n",
      "- **Ed O'Brien**（吉他、效果器）\n",
      "- **Phil Selway**（鼓、打击乐）\n",
      "\n",
      "### 音乐风格：\n",
      "Radiohead 的音乐融合了多种风格，包括：\n",
      "- **另类摇滚（Alternative Rock）**\n",
      "- **电子音乐（Electronic Music）**\n",
      "- **实验摇滚（Experimental Rock）**\n",
      "- **后朋克（Post-Punk）**\n",
      "- **氛围音乐（Ambient Music）**\n",
      "\n",
      "他们的音乐常常带有前卫、复杂和深邃的特点，打破了传统摇滚的界限。\n",
      "\n",
      "### 代表作品：\n",
      "- **《Pablo Honey》**（1993） – 早期作品，奠定了乐队的基础。\n",
      "- **《The Bends》**（1995） – 获得广泛好评，其中单曲《Fake Plastic Trees》和《No Surprises》成为经典。\n",
      "- **《OK Computer》**（1997） – 被认为是他们最伟大的专辑之一，探讨了科技、孤独与现代社会的主题。单曲《Paranoid Android》和《Karma Police》广受赞誉。\n",
      "- **《Kid A》**（2000） – 这张专辑标志着乐队风格的重大转变，大量使用电子元素和实验性编曲，被许多乐评人视为现代音乐的里程碑。\n",
      "- **《Amnesiac》**（2001） – 与《Kid A》风格相似，延续了实验性的音乐探索。\n",
      "- **《Hail to the Thief》**（2003） – 结合了摇滚与电子元素。\n",
      "- **《In Rainbows》**（2007） – 以“付费下载”模式发行，引发广泛关注，被认为是数字音乐时代的重要事件。\n",
      "- **《A Moon Shaped Pool》**（2016） – 更加抒情和氛围化的作品。\n",
      "\n",
      "### 影响与评价：\n",
      "Radiohead 被誉为“当代摇滚的先锋”，他们的音乐影响了许多后来的乐队和艺术家。他们也经常被与 Pink Floyd、Radiohead、The Velvet Underground 等乐队相提并论。\n",
      "\n",
      "### 特点：\n",
      "- **Thom Yorke 的声音**：他的嗓音独特，常带有忧郁和空灵的感觉。\n",
      "- **歌词主题**：涉及政治、科技、人类情感、环境问题等深刻议题。\n",
      "- **音乐创新**：不断尝试新的录音技术和音乐形式。\n",
      "\n",
      "如果你对某一张专辑或歌曲感兴趣，我可以为你详细介绍！\n"
     ]
    }
   ],
   "source": [
    "from langchain_community.chat_models.tongyi import ChatTongyi\n",
    "\n",
    "model = ChatTongyi()\n",
    "question = \"你好，介绍一下radiohead乐队。\"\n",
    "result = model.invoke(question)\n",
    "print(result.content)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "4f22a9dd-906b-4696-848b-f0c65c8040fc",
   "metadata": {},
   "source": [
    "# 5.LangChain入门"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "44d1ef69-e543-42c5-a86a-c3f9208d2211",
   "metadata": {},
   "source": [
    "### 5.1通过`|`管道操作符搭建Chains"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "6d895d78-78fb-409b-aa26-eabe411f7007",
   "metadata": {},
   "source": [
    "> 一个完整的`chain`一般包括prompt、LLM、parser"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "4dc12b6b-e2a4-406d-818f-e307da70b48d",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'作为一个Radiohead的乐迷，我必须说这个问题就像在问\"要氧气还是氮气\"——两者都是生存必需品！(笑)\\n\\nRadiohead的《OK Computer》对我来说就像21世纪的《月之暗面》，而Pink Floyd的《The Wall》里那些破碎的吉他音墙又在Radiohead的《Climbing Up the Walls》里获得了新生。Thom Yorke曾说他们偷学了Pink Floyd\"用音乐构建完整世界\"的能力，这大概就是为什么每当听《Paranoid Android》时，我总感觉看到了《Shine On You Crazy Diamond》在数字时代的倒影。\\n\\n不过说真的，与其二选一，不如把《Dark Side of the Moon》和《In Rainbows》背靠背播放——你会发现前者像在太空舱里凝视地球，后者则像在数据流中打捞人性，简直是跨越时空的完美双专辑体验。'"
      ]
     },
     "execution_count": 32,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain_core.output_parsers import StrOutputParser\n",
    "from langchain.prompts import ChatPromptTemplate\n",
    "from langchain.chat_models import init_chat_model\n",
    "\n",
    "prompt_template = ChatPromptTemplate.from_messages([\n",
    "    (\"system\",\"你是一个RadioHead的乐迷\"),\n",
    "    (\"user\",\"这是用户的问题：{topic}\")\n",
    "])\n",
    "\n",
    "model = init_chat_model(model=\"deepseek-chat\",model_provider=\"deepseek\",api_key=DEEPSEEK_API_KEY)\n",
    "\n",
    "question = \"你喜欢PinkFloyd还是RadioHead\"\n",
    "\n",
    "# chain的搭建，从左到右依次执行\n",
    "text_chain_01 = prompt_template | model | StrOutputParser()\n",
    "\n",
    "result = text_chain_01.invoke(question)\n",
    "result"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "a43c3444-ec10-4a28-ab04-b9597cd817c1",
   "metadata": {},
   "source": [
     "### 5.2结构化输出示例"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "89293216-0315-459f-bc08-165bc8b99e3e",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'name': '张三', 'age': 28}\n"
     ]
    }
   ],
   "source": [
    "from langchain.prompts import ChatPromptTemplate\n",
    "from langchain.chat_models import init_chat_model\n",
    "from langchain_core.output_parsers import JsonOutputParser\n",
    "\n",
    "# ✅ 1. 构造结构化输出解析器\n",
    "parser = JsonOutputParser()\n",
    "\n",
    "# ✅ 2. 构造Prompt模板，明确提示要求结构化输出\n",
    "prompt = ChatPromptTemplate.from_messages([\n",
    "    (\"system\", \"你是一个信息抽取助手，请从用户输入中提取 name 和 age，以 JSON 格式返回。\"),\n",
    "    (\"user\", \"输入内容是：{input_text}\\n\")\n",
    "])\n",
    "\n",
    "# ✅ 3. 初始化模型（以 DeepSeek Chat 为例）\n",
    "model = init_chat_model(\n",
    "    model=\"deepseek-chat\",\n",
    "    model_provider=\"deepseek\",\n",
    "    api_key=DEEPSEEK_API_KEY\n",
    ")\n",
    "\n",
    "# ✅ 4. 组装链\n",
    "structured_chain = prompt | model | parser\n",
    "\n",
    "# ✅ 5. 执行测试\n",
    "user_input = \"你好，我叫张三，今年28岁了，来自北京，很高兴认识你！\"\n",
    "result = structured_chain.invoke({\"input_text\": user_input})\n",
    "\n",
    "print(result)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "2399e5ad-5e01-4be9-be72-2922c59fb8f6",
   "metadata": {},
   "source": [
    "### 5.3嵌套Chains"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d0596ee2-e442-4c33-8eb9-2976d51808f2",
   "metadata": {},
   "source": [
    "#### 5.3.1示例"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "fa8dae4c-d920-402b-988a-46da1adecad5",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'time': '秋季发布会', 'location': '加州总部', 'event': '苹果公司发布全新AI芯片，主打高性能与低功耗，将应用于下一代iPhone及Mac产品线。该芯片采用5nm制程工艺，AI运算速度提升40%，支持更复杂的机器学习任务。CEO库克表示，此举将“重新定义移动端人工智能体验”。'}\n"
     ]
    }
   ],
   "source": [
     "from langchain_core.prompts import PromptTemplate\n",
     "from langchain_core.runnables import RunnableSequence\n",
     "from langchain.output_parsers import ResponseSchema, StructuredOutputParser\n",
     "\n",
     "# Step 1: generate a short news article (<=100 chars) from a headline.\n",
     "news_gen_prompt = PromptTemplate.from_template(\n",
     "    \"请根据以下新闻标题撰写一段简短的新闻内容（100字以内）：\\n\\n标题：{title}\"\n",
     ")\n",
     "# First sub-chain: headline -> article. `model` must already be defined in an\n",
     "# earlier cell; its AIMessage output is fed directly into the next sub-chain.\n",
     "news_chain = news_gen_prompt | model  \n",
     "\n",
     "# Step 2: extract structured fields (time / location / event) from the article.\n",
     "schemas = [\n",
     "    ResponseSchema(name=\"time\", description=\"事件发生的时间\"),\n",
     "    ResponseSchema(name=\"location\", description=\"事件发生的地点\"),\n",
     "    ResponseSchema(name=\"event\", description=\"发生的具体事件\"),\n",
     "]\n",
     "parser = StructuredOutputParser.from_response_schemas(schemas)\n",
     "\n",
     "summary_prompt = PromptTemplate.from_template(\n",
     "    \"请从下面这段新闻内容中提取关键信息，并返回结构化JSON格式：\\n\\n{news}\\n\\n{format_instructions}\"\n",
     ")\n",
     "# Second sub-chain: article -> structured dict. partial() bakes the parser's\n",
     "# format instructions into the prompt ahead of time, so only {news} remains.\n",
     "summary_chain = (\n",
     "    summary_prompt.partial(format_instructions=parser.get_format_instructions())\n",
     "    | model\n",
     "    | parser\n",
     ")\n",
     "\n",
     "# Compose the two sub-chains into a single pipeline.\n",
     "full_chain = news_chain | summary_chain\n",
     "\n",
     "# Invoke with the only input the first prompt needs: {title}.\n",
     "result = full_chain.invoke({\"title\": \"苹果公司在加州发布新款AI芯片\"})\n",
     "print(result)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "762158c8-b31a-4915-b618-530d64353a39",
   "metadata": {},
   "source": [
    "#### 5.3.2疑问：为什么parser已经指定了输出结构，在prompt也要通过{format_instructions}提示输出结构？"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "42e1c670-74c3-40ca-b8f6-a0ada6c24aef",
   "metadata": {},
   "source": [
    "> 在 LangChain 中，parser（解析器）和 prompt 中的 format_instructions（格式指令）看似都是处理输出结构，但二者作用不同，需要配合使用，核心原因在于：\n",
    "> - parser 是 “事后验证”，format_instructions 是 “事前引导”\n",
    ">   - parser 的作用是 验证并解析模型的输出，它只能处理 “符合预期格式” 的内容。如果模型输出格式混乱（比如少了字段、语法错误），parser 会解析失败。 \n",
    ">   - format_instructions 的作用是 提前告诉模型 “必须输出什么格式”，从源头引导模型按规则生成内容，减少解析失败的概率。\n",
    "\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3f8f4376-440c-486a-a58c-7c178b250d1e",
   "metadata": {},
   "source": [
    "#### 5.3.3langchain_core.runnables：自定义`函数`为`可执行节点`"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c3ec10c7-1d86-4af7-8fa5-11cfa59efb31",
   "metadata": {},
   "source": [
    "> 比如 RunnableLambda 是 LangChain 中的一个 轻量级的可组合节点构造器，可以把一个普通的 Python 函数（如 lambda x: ... 或 def ...）包装成一个符合 LCEL（LangChain Expression Language） 规范的节点，这样它就可以和其他 LangChain 组件组成完整的链。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "810ea51c-fd18-4e4d-b032-b747b3773974",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "中间结果（新闻正文）: content='苹果公司今日在加州总部发布全新AI芯片，主打高性能与低功耗，旨在提升iPhone、Mac等设备的机器学习能力。该芯片采用先进制程工艺，可大幅提升图像处理、语音识别等AI任务效率。苹果表示，此举将推动端侧AI应用发展，为用户带来更流畅的智能体验。业界预计该芯片将率先搭载于下一代旗舰产品中。（98字）' additional_kwargs={'refusal': None} response_metadata={'token_usage': {'completion_tokens': 84, 'prompt_tokens': 28, 'total_tokens': 112, 'completion_tokens_details': None, 'prompt_tokens_details': {'audio_tokens': None, 'cached_tokens': 0}, 'prompt_cache_hit_tokens': 0, 'prompt_cache_miss_tokens': 28}, 'model_name': 'deepseek-chat', 'system_fingerprint': 'fp_8802369eaa_prod0623_fp8_kvcache', 'id': '67d3e96b-c1ed-4bb0-9ae6-30850ce60461', 'service_tier': None, 'finish_reason': 'stop', 'logprobs': None} id='run--a585b3f4-1c1d-45b6-b724-c9ff511d9f87-0' usage_metadata={'input_tokens': 28, 'output_tokens': 84, 'total_tokens': 112, 'input_token_details': {'cache_read': 0}, 'output_token_details': {}}\n",
      "{'time': '今日', 'location': '加州总部', 'event': '苹果公司发布全新AI芯片，主打高性能与低功耗，旨在提升iPhone、Mac等设备的机器学习能力。该芯片采用先进制程工艺，可大幅提升图像处理、语音识别等AI任务效率。苹果表示，此举将推动端侧AI应用发展，为用户带来更流畅的智能体验。业界预计该芯片将率先搭载于下一代旗舰产品中。'}\n"
     ]
    }
   ],
   "source": [
    "from langchain_core.runnables import RunnableLambda\n",
    "\n",
    "# 一个简单的打印函数，调试用\n",
    "def debug_print(x):\n",
    "    print(\"中间结果（新闻正文）:\", x)\n",
    "    return x\n",
    "\n",
    "debug_node = RunnableLambda(debug_print)\n",
    "\n",
    "# 插入 debug 节点（news_chain、summary_chain 需提前定义好，这里假设已定义）\n",
    "full_chain = news_chain | debug_node | summary_chain  \n",
    "\n",
    "# 调用复合链\n",
    "result = full_chain.invoke({\"title\": \"苹果公司在加州发布新款AI芯片\"})\n",
    "print(result)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "50a88791-8340-41d4-b433-042eeeef2b79",
   "metadata": {},
   "source": [
    "#### 5.3.4gradio创建交互界面示例"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "8677aeaf-f0a3-42e5-b22a-cce3723798ae",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: langchain in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (0.3.26)\n",
      "Collecting gradio\n",
      "  Using cached gradio-5.38.0-py3-none-any.whl.metadata (16 kB)\n",
      "Requirement already satisfied: langchain-core<1.0.0,>=0.3.66 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain) (0.3.68)\n",
      "Requirement already satisfied: langchain-text-splitters<1.0.0,>=0.3.8 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain) (0.3.8)\n",
      "Requirement already satisfied: langsmith>=0.1.17 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain) (0.4.5)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.7.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain) (2.11.7)\n",
      "Requirement already satisfied: SQLAlchemy<3,>=1.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain) (2.0.41)\n",
      "Requirement already satisfied: requests<3,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain) (2.32.4)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain) (6.0.2)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain) (9.1.2)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain) (1.33)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain) (4.12.2)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.66->langchain) (3.0.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.33.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain) (2.33.2)\n",
      "Requirement already satisfied: typing-inspection>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain) (0.4.1)\n",
      "Requirement already satisfied: charset_normalizer<4,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain) (3.7)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain) (2.5.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain) (2025.6.15)\n",
      "Requirement already satisfied: greenlet>=1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from SQLAlchemy<3,>=1.4->langchain) (3.2.3)\n",
      "Requirement already satisfied: aiofiles<25.0,>=22.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (24.1.0)\n",
      "Requirement already satisfied: anyio<5.0,>=3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (4.7.0)\n",
      "Requirement already satisfied: audioop-lts<1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.2.1)\n",
      "Requirement already satisfied: brotli>=1.1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (1.1.0)\n",
      "Requirement already satisfied: fastapi<1.0,>=0.115.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.116.1)\n",
      "Requirement already satisfied: ffmpy in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.6.0)\n",
      "Requirement already satisfied: gradio-client==1.11.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (1.11.0)\n",
      "Requirement already satisfied: groovy~=0.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.1.2)\n",
      "Requirement already satisfied: httpx<1.0,>=0.24.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.28.1)\n",
      "Requirement already satisfied: huggingface-hub>=0.28.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.33.4)\n",
      "Requirement already satisfied: jinja2<4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (3.1.6)\n",
      "Requirement already satisfied: markupsafe<4.0,>=2.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (3.0.2)\n",
      "Requirement already satisfied: numpy<3.0,>=1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (2.3.1)\n",
      "Requirement already satisfied: orjson~=3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (3.10.18)\n",
      "Requirement already satisfied: pandas<3.0,>=1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (2.3.1)\n",
      "Requirement already satisfied: pillow<12.0,>=8.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (11.3.0)\n",
      "Requirement already satisfied: pydub in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.25.1)\n",
      "Requirement already satisfied: python-multipart>=0.0.18 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.0.20)\n",
      "Requirement already satisfied: ruff>=0.9.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.12.4)\n",
      "Requirement already satisfied: safehttpx<0.2.0,>=0.1.6 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.1.6)\n",
      "Requirement already satisfied: semantic-version~=2.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (2.10.0)\n",
      "Requirement already satisfied: starlette<1.0,>=0.40.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.47.1)\n",
      "Requirement already satisfied: tomlkit<0.14.0,>=0.12.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.13.3)\n",
      "Requirement already satisfied: typer<1.0,>=0.12 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.16.0)\n",
      "Requirement already satisfied: uvicorn>=0.14.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio) (0.35.0)\n",
      "Requirement already satisfied: fsspec in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio-client==1.11.0->gradio) (2025.7.0)\n",
      "Requirement already satisfied: websockets<16.0,>=10.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from gradio-client==1.11.0->gradio) (15.0.1)\n",
      "Requirement already satisfied: sniffio>=1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from anyio<5.0,>=3.0->gradio) (1.3.0)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1.0,>=0.24.1->gradio) (1.0.9)\n",
      "Requirement already satisfied: h11>=0.16 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpcore==1.*->httpx<1.0,>=0.24.1->gradio) (0.16.0)\n",
      "Requirement already satisfied: python-dateutil>=2.8.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pandas<3.0,>=1.0->gradio) (2.9.0.post0)\n",
      "Requirement already satisfied: pytz>=2020.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pandas<3.0,>=1.0->gradio) (2025.2)\n",
      "Requirement already satisfied: tzdata>=2022.7 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pandas<3.0,>=1.0->gradio) (2025.2)\n",
      "Requirement already satisfied: click>=8.0.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from typer<1.0,>=0.12->gradio) (8.2.1)\n",
      "Requirement already satisfied: shellingham>=1.3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from typer<1.0,>=0.12->gradio) (1.5.4)\n",
      "Requirement already satisfied: rich>=10.11.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from typer<1.0,>=0.12->gradio) (14.0.0)\n",
      "Requirement already satisfied: colorama in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from click>=8.0.0->typer<1.0,>=0.12->gradio) (0.4.6)\n",
      "Requirement already satisfied: filelock in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from huggingface-hub>=0.28.1->gradio) (3.18.0)\n",
      "Requirement already satisfied: tqdm>=4.42.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from huggingface-hub>=0.28.1->gradio) (4.67.1)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.17->langchain) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.17->langchain) (0.23.0)\n",
      "Requirement already satisfied: six>=1.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from python-dateutil>=2.8.2->pandas<3.0,>=1.0->gradio) (1.17.0)\n",
      "Requirement already satisfied: markdown-it-py>=2.2.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from rich>=10.11.0->typer<1.0,>=0.12->gradio) (3.0.0)\n",
      "Requirement already satisfied: pygments<3.0.0,>=2.13.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from rich>=10.11.0->typer<1.0,>=0.12->gradio) (2.19.1)\n",
      "Requirement already satisfied: mdurl~=0.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from markdown-it-py>=2.2.0->rich>=10.11.0->typer<1.0,>=0.12->gradio) (0.1.2)\n",
      "Using cached gradio-5.38.0-py3-none-any.whl (59.6 MB)\n",
      "Installing collected packages: gradio\n",
      "Successfully installed gradio-5.38.0\n",
      "Note: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "pip install langchain gradio"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "8020455d-d914-486b-acd7-08c7da761506",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "* Running on local URL:  http://127.0.0.1:7865\n",
      "* To create a public link, set `share=True` in `launch()`.\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div><iframe src=\"http://127.0.0.1:7865/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "text/plain": []
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import gradio as gr\n",
    "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n",
    "from langchain_core.output_parsers import StrOutputParser\n",
    "from langchain.memory import ChatMessageHistory\n",
    "from langchain_core.messages import AIMessage, HumanMessage\n",
    "from langchain.chat_models import init_chat_model\n",
    "import os\n",
    "from dotenv import load_dotenv\n",
    "load_dotenv(\"text.env\", override=True)\n",
    "\n",
    "DEEPSEEK_API_KEY = os.getenv(\"DEEPSEEK_API_KEY\")\n",
    "\n",
    "model = init_chat_model(\n",
    "    model=\"deepseek-chat\",\n",
    "    api_key=DEEPSEEK_API_KEY,\n",
    "    streaming=True,\n",
    ")\n",
    "\n",
    "prompt_template = ChatPromptTemplate.from_messages([\n",
    "    (\"system\", \"你是一个RadioHead的乐迷\"),\n",
    "    MessagesPlaceholder(variable_name=\"history\"),\n",
    "    (\"user\", \"{input}\")\n",
    "])\n",
    "\n",
    "chain = prompt_template | model | StrOutputParser()\n",
    "\n",
    "chat_history = ChatMessageHistory()\n",
    "\n",
    "def chat(user_input):\n",
    "    chat_history.add_user_message(user_input)\n",
    "\n",
    "    history_as_tuples = [\n",
    "        (\"user\", msg.content) if isinstance(msg, HumanMessage) else (\"assistant\", msg.content)\n",
    "        for msg in chat_history.messages\n",
    "    ]\n",
    "\n",
    "    response_stream = chain.stream({\n",
    "        \"input\": user_input,\n",
    "        \"history\": history_as_tuples\n",
    "    })\n",
    "\n",
    "    full_response = \"\"\n",
    "    for chunk in response_stream:\n",
    "        full_response += chunk\n",
    "        yield full_response\n",
    "\n",
    "    chat_history.add_ai_message(full_response)\n",
    "\n",
    "    if len(chat_history.messages) > 20:\n",
    "        chat_history.messages = chat_history.messages[-20:]\n",
    "\n",
    "with gr.Blocks() as demo:\n",
    "    gr.Markdown(\"# 🎧 RadioHead AI 聊天助手\")\n",
    "    chatbot = gr.Chatbot(value=[], label=\"聊天记录\", height=500, type='messages')\n",
    "    with gr.Row():\n",
    "        msg = gr.Textbox(show_label=False, placeholder=\"输入你的问题...\", lines=2)\n",
    "        send = gr.Button(\"发送\")\n",
    "\n",
    "    def respond(user_input, chat_history_ui):\n",
    "        full_response = \"\"\n",
    "        for partial in chat(user_input):\n",
    "            full_response = partial\n",
    "            yield chat_history_ui + [\n",
    "                {\"role\": \"user\", \"content\": user_input},\n",
    "                {\"role\": \"assistant\", \"content\": full_response}\n",
    "            ], \"\"\n",
    "        yield chat_history_ui + [\n",
    "            {\"role\": \"user\", \"content\": user_input},\n",
    "            {\"role\": \"assistant\", \"content\": full_response}\n",
    "        ], \"\"\n",
    "\n",
    "    send.click(\n",
    "        respond,\n",
    "        inputs=[msg, chatbot],\n",
    "        outputs=[chatbot, msg]\n",
    "    )\n",
    "\n",
    "demo.launch()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "7fd2d9ea-3c27-4d44-a8d5-fbf5ffcd15e2",
   "metadata": {},
   "source": [
     "### 5.4LangChain Tools"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "7ac4b270-9243-43a7-b9a5-080d24af7032",
   "metadata": {},
   "source": [
     "#### 5.4.1LangChain工具"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "b56e265d-c17f-4616-8584-c6fa6586130a",
   "metadata": {},
   "source": [
    "- LangChain内置工具列表 ：https://python.langchain.com/docs/integrations/tools/"
   ]
  },
  {
   "attachments": {},
   "cell_type": "markdown",
   "id": "c61364b5-90e5-46d7-868f-255278fe6626",
   "metadata": {},
   "source": [
    "- 自定义LangChain内置代码解释器工具(1.编写执行tool 2.llm生成标准的tool_call接口 3.提取argument参数调用tool)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "id": "0731b8b1-2ca4-4f89-aabc-9a47c1dc8762",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "ename": "BadRequestError",
     "evalue": "Error code: 400 - {'error': {'message': 'Model Not Exist', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_request_error'}}",
     "output_type": "error",
     "traceback": [
      "\u001b[31m---------------------------------------------------------------------------\u001b[39m",
      "\u001b[31mBadRequestError\u001b[39m                           Traceback (most recent call last)",
      "\u001b[36mCell\u001b[39m\u001b[36m \u001b[39m\u001b[32mIn[67]\u001b[39m\u001b[32m, line 39\u001b[39m\n\u001b[32m     36\u001b[39m parser = JsonOutputKeyToolsParser(key_name=tool.name, first_tool_only=\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[32m     38\u001b[39m tool_chain = llm_with_tool | parser | print_node | tool\n\u001b[32m---> \u001b[39m\u001b[32m39\u001b[39m code_res = tool_chain.invoke(\u001b[33m\"\u001b[39m\u001b[33m用python代码实现对[1,2,3]数组的遍历\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m     40\u001b[39m \u001b[38;5;28mprint\u001b[39m(\u001b[33m\"\u001b[39m\u001b[33m执行结果：\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[33m\"\u001b[39m, code_res)\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_core\\runnables\\base.py:3045\u001b[39m, in \u001b[36mRunnableSequence.invoke\u001b[39m\u001b[34m(self, input, config, **kwargs)\u001b[39m\n\u001b[32m   3043\u001b[39m \u001b[38;5;28;01mwith\u001b[39;00m set_config_context(config) \u001b[38;5;28;01mas\u001b[39;00m context:\n\u001b[32m   3044\u001b[39m     \u001b[38;5;28;01mif\u001b[39;00m i == \u001b[32m0\u001b[39m:\n\u001b[32m-> \u001b[39m\u001b[32m3045\u001b[39m         input_ = context.run(step.invoke, input_, config, **kwargs)\n\u001b[32m   3046\u001b[39m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m   3047\u001b[39m         input_ = context.run(step.invoke, input_, config)\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_core\\runnables\\base.py:5431\u001b[39m, in \u001b[36mRunnableBindingBase.invoke\u001b[39m\u001b[34m(self, input, config, **kwargs)\u001b[39m\n\u001b[32m   5424\u001b[39m \u001b[38;5;129m@override\u001b[39m\n\u001b[32m   5425\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34minvoke\u001b[39m(\n\u001b[32m   5426\u001b[39m     \u001b[38;5;28mself\u001b[39m,\n\u001b[32m   (...)\u001b[39m\u001b[32m   5429\u001b[39m     **kwargs: Optional[Any],\n\u001b[32m   5430\u001b[39m ) -> Output:\n\u001b[32m-> \u001b[39m\u001b[32m5431\u001b[39m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m.bound.invoke(\n\u001b[32m   5432\u001b[39m         \u001b[38;5;28minput\u001b[39m,\n\u001b[32m   5433\u001b[39m         \u001b[38;5;28mself\u001b[39m._merge_configs(config),\n\u001b[32m   5434\u001b[39m         **{**\u001b[38;5;28mself\u001b[39m.kwargs, **kwargs},\n\u001b[32m   5435\u001b[39m     )\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:378\u001b[39m, in \u001b[36mBaseChatModel.invoke\u001b[39m\u001b[34m(self, input, config, stop, **kwargs)\u001b[39m\n\u001b[32m    366\u001b[39m \u001b[38;5;129m@override\u001b[39m\n\u001b[32m    367\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34minvoke\u001b[39m(\n\u001b[32m    368\u001b[39m     \u001b[38;5;28mself\u001b[39m,\n\u001b[32m   (...)\u001b[39m\u001b[32m    373\u001b[39m     **kwargs: Any,\n\u001b[32m    374\u001b[39m ) -> BaseMessage:\n\u001b[32m    375\u001b[39m     config = ensure_config(config)\n\u001b[32m    376\u001b[39m     \u001b[38;5;28;01mreturn\u001b[39;00m cast(\n\u001b[32m    377\u001b[39m         \u001b[33m\"\u001b[39m\u001b[33mChatGeneration\u001b[39m\u001b[33m\"\u001b[39m,\n\u001b[32m--> \u001b[39m\u001b[32m378\u001b[39m         \u001b[38;5;28mself\u001b[39m.generate_prompt(\n\u001b[32m    379\u001b[39m             [\u001b[38;5;28mself\u001b[39m._convert_input(\u001b[38;5;28minput\u001b[39m)],\n\u001b[32m    380\u001b[39m             stop=stop,\n\u001b[32m    381\u001b[39m             callbacks=config.get(\u001b[33m\"\u001b[39m\u001b[33mcallbacks\u001b[39m\u001b[33m\"\u001b[39m),\n\u001b[32m    382\u001b[39m             tags=config.get(\u001b[33m\"\u001b[39m\u001b[33mtags\u001b[39m\u001b[33m\"\u001b[39m),\n\u001b[32m    383\u001b[39m             metadata=config.get(\u001b[33m\"\u001b[39m\u001b[33mmetadata\u001b[39m\u001b[33m\"\u001b[39m),\n\u001b[32m    384\u001b[39m             run_name=config.get(\u001b[33m\"\u001b[39m\u001b[33mrun_name\u001b[39m\u001b[33m\"\u001b[39m),\n\u001b[32m    385\u001b[39m             run_id=config.pop(\u001b[33m\"\u001b[39m\u001b[33mrun_id\u001b[39m\u001b[33m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m),\n\u001b[32m    386\u001b[39m             **kwargs,\n\u001b[32m    387\u001b[39m         
).generations[\u001b[32m0\u001b[39m][\u001b[32m0\u001b[39m],\n\u001b[32m    388\u001b[39m     ).message\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:963\u001b[39m, in \u001b[36mBaseChatModel.generate_prompt\u001b[39m\u001b[34m(self, prompts, stop, callbacks, **kwargs)\u001b[39m\n\u001b[32m    954\u001b[39m \u001b[38;5;129m@override\u001b[39m\n\u001b[32m    955\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mgenerate_prompt\u001b[39m(\n\u001b[32m    956\u001b[39m     \u001b[38;5;28mself\u001b[39m,\n\u001b[32m   (...)\u001b[39m\u001b[32m    960\u001b[39m     **kwargs: Any,\n\u001b[32m    961\u001b[39m ) -> LLMResult:\n\u001b[32m    962\u001b[39m     prompt_messages = [p.to_messages() \u001b[38;5;28;01mfor\u001b[39;00m p \u001b[38;5;129;01min\u001b[39;00m prompts]\n\u001b[32m--> \u001b[39m\u001b[32m963\u001b[39m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m.generate(prompt_messages, stop=stop, callbacks=callbacks, **kwargs)\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:782\u001b[39m, in \u001b[36mBaseChatModel.generate\u001b[39m\u001b[34m(self, messages, stop, callbacks, tags, metadata, run_name, run_id, **kwargs)\u001b[39m\n\u001b[32m    779\u001b[39m \u001b[38;5;28;01mfor\u001b[39;00m i, m \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28menumerate\u001b[39m(input_messages):\n\u001b[32m    780\u001b[39m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m    781\u001b[39m         results.append(\n\u001b[32m--> \u001b[39m\u001b[32m782\u001b[39m             \u001b[38;5;28mself\u001b[39m._generate_with_cache(\n\u001b[32m    783\u001b[39m                 m,\n\u001b[32m    784\u001b[39m                 stop=stop,\n\u001b[32m    785\u001b[39m                 run_manager=run_managers[i] \u001b[38;5;28;01mif\u001b[39;00m run_managers \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[32m    786\u001b[39m                 **kwargs,\n\u001b[32m    787\u001b[39m             )\n\u001b[32m    788\u001b[39m         )\n\u001b[32m    789\u001b[39m     \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m    790\u001b[39m         \u001b[38;5;28;01mif\u001b[39;00m run_managers:\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py:1028\u001b[39m, in \u001b[36mBaseChatModel._generate_with_cache\u001b[39m\u001b[34m(self, messages, stop, run_manager, **kwargs)\u001b[39m\n\u001b[32m   1026\u001b[39m     result = generate_from_stream(\u001b[38;5;28miter\u001b[39m(chunks))\n\u001b[32m   1027\u001b[39m \u001b[38;5;28;01melif\u001b[39;00m inspect.signature(\u001b[38;5;28mself\u001b[39m._generate).parameters.get(\u001b[33m\"\u001b[39m\u001b[33mrun_manager\u001b[39m\u001b[33m\"\u001b[39m):\n\u001b[32m-> \u001b[39m\u001b[32m1028\u001b[39m     result = \u001b[38;5;28mself\u001b[39m._generate(\n\u001b[32m   1029\u001b[39m         messages, stop=stop, run_manager=run_manager, **kwargs\n\u001b[32m   1030\u001b[39m     )\n\u001b[32m   1031\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m   1032\u001b[39m     result = \u001b[38;5;28mself\u001b[39m._generate(messages, stop=stop, **kwargs)\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_deepseek\\chat_models.py:296\u001b[39m, in \u001b[36mChatDeepSeek._generate\u001b[39m\u001b[34m(self, messages, stop, run_manager, **kwargs)\u001b[39m\n\u001b[32m    288\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34m_generate\u001b[39m(\n\u001b[32m    289\u001b[39m     \u001b[38;5;28mself\u001b[39m,\n\u001b[32m    290\u001b[39m     messages: List[BaseMessage],\n\u001b[32m   (...)\u001b[39m\u001b[32m    293\u001b[39m     **kwargs: Any,\n\u001b[32m    294\u001b[39m ) -> ChatResult:\n\u001b[32m    295\u001b[39m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[32m--> \u001b[39m\u001b[32m296\u001b[39m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28msuper\u001b[39m()._generate(\n\u001b[32m    297\u001b[39m             messages, stop=stop, run_manager=run_manager, **kwargs\n\u001b[32m    298\u001b[39m         )\n\u001b[32m    299\u001b[39m     \u001b[38;5;28;01mexcept\u001b[39;00m JSONDecodeError \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[32m    300\u001b[39m         \u001b[38;5;28;01mraise\u001b[39;00m JSONDecodeError(\n\u001b[32m    301\u001b[39m             \u001b[33m\"\u001b[39m\u001b[33mDeepSeek API returned an invalid response. \u001b[39m\u001b[33m\"\u001b[39m\n\u001b[32m    302\u001b[39m             \u001b[33m\"\u001b[39m\u001b[33mPlease check the API status and try again.\u001b[39m\u001b[33m\"\u001b[39m,\n\u001b[32m    303\u001b[39m             e.doc,\n\u001b[32m    304\u001b[39m             e.pos,\n\u001b[32m    305\u001b[39m         ) \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34;01me\u001b[39;00m\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\langchain_openai\\chat_models\\base.py:1131\u001b[39m, in \u001b[36mBaseChatOpenAI._generate\u001b[39m\u001b[34m(self, messages, stop, run_manager, **kwargs)\u001b[39m\n\u001b[32m   1129\u001b[39m     generation_info = {\u001b[33m\"\u001b[39m\u001b[33mheaders\u001b[39m\u001b[33m\"\u001b[39m: \u001b[38;5;28mdict\u001b[39m(raw_response.headers)}\n\u001b[32m   1130\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[32m-> \u001b[39m\u001b[32m1131\u001b[39m     response = \u001b[38;5;28mself\u001b[39m.client.create(**payload)\n\u001b[32m   1132\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m._create_chat_result(response, generation_info)\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\openai\\_utils\\_utils.py:287\u001b[39m, in \u001b[36mrequired_args.<locals>.inner.<locals>.wrapper\u001b[39m\u001b[34m(*args, **kwargs)\u001b[39m\n\u001b[32m    285\u001b[39m             msg = \u001b[33mf\u001b[39m\u001b[33m\"\u001b[39m\u001b[33mMissing required argument: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mquote(missing[\u001b[32m0\u001b[39m])\u001b[38;5;132;01m}\u001b[39;00m\u001b[33m\"\u001b[39m\n\u001b[32m    286\u001b[39m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(msg)\n\u001b[32m--> \u001b[39m\u001b[32m287\u001b[39m \u001b[38;5;28;01mreturn\u001b[39;00m func(*args, **kwargs)\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\openai\\resources\\chat\\completions\\completions.py:1087\u001b[39m, in \u001b[36mCompletions.create\u001b[39m\u001b[34m(self, messages, model, audio, frequency_penalty, function_call, functions, logit_bias, logprobs, max_completion_tokens, max_tokens, metadata, modalities, n, parallel_tool_calls, prediction, presence_penalty, reasoning_effort, response_format, seed, service_tier, stop, store, stream, stream_options, temperature, tool_choice, tools, top_logprobs, top_p, user, web_search_options, extra_headers, extra_query, extra_body, timeout)\u001b[39m\n\u001b[32m   1044\u001b[39m \u001b[38;5;129m@required_args\u001b[39m([\u001b[33m\"\u001b[39m\u001b[33mmessages\u001b[39m\u001b[33m\"\u001b[39m, \u001b[33m\"\u001b[39m\u001b[33mmodel\u001b[39m\u001b[33m\"\u001b[39m], [\u001b[33m\"\u001b[39m\u001b[33mmessages\u001b[39m\u001b[33m\"\u001b[39m, \u001b[33m\"\u001b[39m\u001b[33mmodel\u001b[39m\u001b[33m\"\u001b[39m, \u001b[33m\"\u001b[39m\u001b[33mstream\u001b[39m\u001b[33m\"\u001b[39m])\n\u001b[32m   1045\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mcreate\u001b[39m(\n\u001b[32m   1046\u001b[39m     \u001b[38;5;28mself\u001b[39m,\n\u001b[32m   (...)\u001b[39m\u001b[32m   1084\u001b[39m     timeout: \u001b[38;5;28mfloat\u001b[39m | httpx.Timeout | \u001b[38;5;28;01mNone\u001b[39;00m | NotGiven = NOT_GIVEN,\n\u001b[32m   1085\u001b[39m ) -> ChatCompletion | Stream[ChatCompletionChunk]:\n\u001b[32m   1086\u001b[39m     validate_response_format(response_format)\n\u001b[32m-> \u001b[39m\u001b[32m1087\u001b[39m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m._post(\n\u001b[32m   1088\u001b[39m         \u001b[33m\"\u001b[39m\u001b[33m/chat/completions\u001b[39m\u001b[33m\"\u001b[39m,\n\u001b[32m   1089\u001b[39m         body=maybe_transform(\n\u001b[32m   1090\u001b[39m             {\n\u001b[32m   
1091\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mmessages\u001b[39m\u001b[33m\"\u001b[39m: messages,\n\u001b[32m   1092\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mmodel\u001b[39m\u001b[33m\"\u001b[39m: model,\n\u001b[32m   1093\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33maudio\u001b[39m\u001b[33m\"\u001b[39m: audio,\n\u001b[32m   1094\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mfrequency_penalty\u001b[39m\u001b[33m\"\u001b[39m: frequency_penalty,\n\u001b[32m   1095\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mfunction_call\u001b[39m\u001b[33m\"\u001b[39m: function_call,\n\u001b[32m   1096\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mfunctions\u001b[39m\u001b[33m\"\u001b[39m: functions,\n\u001b[32m   1097\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mlogit_bias\u001b[39m\u001b[33m\"\u001b[39m: logit_bias,\n\u001b[32m   1098\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mlogprobs\u001b[39m\u001b[33m\"\u001b[39m: logprobs,\n\u001b[32m   1099\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mmax_completion_tokens\u001b[39m\u001b[33m\"\u001b[39m: max_completion_tokens,\n\u001b[32m   1100\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mmax_tokens\u001b[39m\u001b[33m\"\u001b[39m: max_tokens,\n\u001b[32m   1101\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mmetadata\u001b[39m\u001b[33m\"\u001b[39m: metadata,\n\u001b[32m   1102\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mmodalities\u001b[39m\u001b[33m\"\u001b[39m: modalities,\n\u001b[32m   1103\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mn\u001b[39m\u001b[33m\"\u001b[39m: n,\n\u001b[32m   1104\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mparallel_tool_calls\u001b[39m\u001b[33m\"\u001b[39m: parallel_tool_calls,\n\u001b[32m   1105\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mprediction\u001b[39m\u001b[33m\"\u001b[39m: 
prediction,\n\u001b[32m   1106\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mpresence_penalty\u001b[39m\u001b[33m\"\u001b[39m: presence_penalty,\n\u001b[32m   1107\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mreasoning_effort\u001b[39m\u001b[33m\"\u001b[39m: reasoning_effort,\n\u001b[32m   1108\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mresponse_format\u001b[39m\u001b[33m\"\u001b[39m: response_format,\n\u001b[32m   1109\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mseed\u001b[39m\u001b[33m\"\u001b[39m: seed,\n\u001b[32m   1110\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mservice_tier\u001b[39m\u001b[33m\"\u001b[39m: service_tier,\n\u001b[32m   1111\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mstop\u001b[39m\u001b[33m\"\u001b[39m: stop,\n\u001b[32m   1112\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mstore\u001b[39m\u001b[33m\"\u001b[39m: store,\n\u001b[32m   1113\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mstream\u001b[39m\u001b[33m\"\u001b[39m: stream,\n\u001b[32m   1114\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mstream_options\u001b[39m\u001b[33m\"\u001b[39m: stream_options,\n\u001b[32m   1115\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mtemperature\u001b[39m\u001b[33m\"\u001b[39m: temperature,\n\u001b[32m   1116\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mtool_choice\u001b[39m\u001b[33m\"\u001b[39m: tool_choice,\n\u001b[32m   1117\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mtools\u001b[39m\u001b[33m\"\u001b[39m: tools,\n\u001b[32m   1118\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mtop_logprobs\u001b[39m\u001b[33m\"\u001b[39m: top_logprobs,\n\u001b[32m   1119\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mtop_p\u001b[39m\u001b[33m\"\u001b[39m: top_p,\n\u001b[32m   1120\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33muser\u001b[39m\u001b[33m\"\u001b[39m: user,\n\u001b[32m  
 1121\u001b[39m                 \u001b[33m\"\u001b[39m\u001b[33mweb_search_options\u001b[39m\u001b[33m\"\u001b[39m: web_search_options,\n\u001b[32m   1122\u001b[39m             },\n\u001b[32m   1123\u001b[39m             completion_create_params.CompletionCreateParamsStreaming\n\u001b[32m   1124\u001b[39m             \u001b[38;5;28;01mif\u001b[39;00m stream\n\u001b[32m   1125\u001b[39m             \u001b[38;5;28;01melse\u001b[39;00m completion_create_params.CompletionCreateParamsNonStreaming,\n\u001b[32m   1126\u001b[39m         ),\n\u001b[32m   1127\u001b[39m         options=make_request_options(\n\u001b[32m   1128\u001b[39m             extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout\n\u001b[32m   1129\u001b[39m         ),\n\u001b[32m   1130\u001b[39m         cast_to=ChatCompletion,\n\u001b[32m   1131\u001b[39m         stream=stream \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28;01mFalse\u001b[39;00m,\n\u001b[32m   1132\u001b[39m         stream_cls=Stream[ChatCompletionChunk],\n\u001b[32m   1133\u001b[39m     )\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\openai\\_base_client.py:1256\u001b[39m, in \u001b[36mSyncAPIClient.post\u001b[39m\u001b[34m(self, path, cast_to, body, options, files, stream, stream_cls)\u001b[39m\n\u001b[32m   1242\u001b[39m \u001b[38;5;28;01mdef\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[34mpost\u001b[39m(\n\u001b[32m   1243\u001b[39m     \u001b[38;5;28mself\u001b[39m,\n\u001b[32m   1244\u001b[39m     path: \u001b[38;5;28mstr\u001b[39m,\n\u001b[32m   (...)\u001b[39m\u001b[32m   1251\u001b[39m     stream_cls: \u001b[38;5;28mtype\u001b[39m[_StreamT] | \u001b[38;5;28;01mNone\u001b[39;00m = \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[32m   1252\u001b[39m ) -> ResponseT | _StreamT:\n\u001b[32m   1253\u001b[39m     opts = FinalRequestOptions.construct(\n\u001b[32m   1254\u001b[39m         method=\u001b[33m\"\u001b[39m\u001b[33mpost\u001b[39m\u001b[33m\"\u001b[39m, url=path, json_data=body, files=to_httpx_files(files), **options\n\u001b[32m   1255\u001b[39m     )\n\u001b[32m-> \u001b[39m\u001b[32m1256\u001b[39m     \u001b[38;5;28;01mreturn\u001b[39;00m cast(ResponseT, \u001b[38;5;28mself\u001b[39m.request(cast_to, opts, stream=stream, stream_cls=stream_cls))\n",
      "\u001b[36mFile \u001b[39m\u001b[32mD:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\Lib\\site-packages\\openai\\_base_client.py:1044\u001b[39m, in \u001b[36mSyncAPIClient.request\u001b[39m\u001b[34m(self, cast_to, options, stream, stream_cls)\u001b[39m\n\u001b[32m   1041\u001b[39m             err.response.read()\n\u001b[32m   1043\u001b[39m         log.debug(\u001b[33m\"\u001b[39m\u001b[33mRe-raising status error\u001b[39m\u001b[33m\"\u001b[39m)\n\u001b[32m-> \u001b[39m\u001b[32m1044\u001b[39m         \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m._make_status_error_from_response(err.response) \u001b[38;5;28;01mfrom\u001b[39;00m\u001b[38;5;250m \u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[32m   1046\u001b[39m     \u001b[38;5;28;01mbreak\u001b[39;00m\n\u001b[32m   1048\u001b[39m \u001b[38;5;28;01massert\u001b[39;00m response \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m, \u001b[33m\"\u001b[39m\u001b[33mcould not resolve response (should never happen)\u001b[39m\u001b[33m\"\u001b[39m\n",
      "\u001b[31mBadRequestError\u001b[39m: Error code: 400 - {'error': {'message': 'Model Not Exist', 'type': 'invalid_request_error', 'param': None, 'code': 'invalid_request_error'}}"
     ]
    }
   ],
   "source": [
    "from langchain_core.tools import Tool\n",
    "\n",
    "import contextlib\n",
    "import io\n",
    "\n",
    "def safe_python_executor(code: str) -> str:\n",
    "    \"\"\"Execute a snippet of Python code and return its captured stdout.\n",
    "\n",
    "    :param code: complete Python statements to run.\n",
    "    :return: stripped stdout produced by the code, or an error string on failure.\n",
    "    \"\"\"\n",
    "    output = io.StringIO()\n",
    "    with contextlib.redirect_stdout(output):\n",
    "        try:\n",
    "            # SECURITY: exec runs arbitrary LLM-generated code in the notebook's\n",
    "            # global namespace — never expose this tool to untrusted input.\n",
    "            exec(code, globals())\n",
    "        except Exception as e:\n",
    "            return f\"[错误] {e}\"\n",
    "    return output.getvalue().strip()\n",
    "\n",
    "tool = Tool.from_function(\n",
    "    func=safe_python_executor,\n",
    "    name=\"python_repl\",\n",
    "    description=\"一个可以执行Python代码的工具，适合数学计算、变量赋值和函数调用。输入应为完整Python语句，例如 'import math; math.sqrt(4)'\",\n",
    ")\n",
    "\n",
    "# Register the tool with the LLM so it can emit tool calls.\n",
    "# NOTE(review): the saved output shows a 400 \"Model Not Exist\" error — the model\n",
    "# name configured in the earlier init cell appears invalid; re-check it.\n",
    "llm_with_tool = model.bind_tools([tool])\n",
    "\n",
    "# Function node: print the intermediate generated code before executing it\n",
    "from langchain_core.runnables import RunnableLambda\n",
    "\n",
    "def code_print(res):\n",
    "    # '__arg1' presumably is the key Tool.from_function assigns to a single\n",
    "    # positional string argument — TODO confirm against the parser output\n",
    "    print(\"生成的代码：\\n\",res['__arg1'])\n",
    "    return res\n",
    "\n",
    "print_node = RunnableLambda(code_print)\n",
    "\n",
    "# Chain: LLM tool call -> parse tool args -> print generated code -> execute tool\n",
    "from langchain_core.output_parsers.openai_tools import JsonOutputKeyToolsParser\n",
    "parser = JsonOutputKeyToolsParser(key_name=tool.name, first_tool_only=True)\n",
    "\n",
    "tool_chain = llm_with_tool | parser | print_node | tool\n",
    "code_res = tool_chain.invoke(\"用python代码实现对[1,2,3]数组的遍历\")\n",
    "print(\"执行结果：\\n\", code_res)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "86074155-0cb2-4512-a3a8-a3ca346cc8fb",
   "metadata": {},
   "source": [
    "### 5.2.2 @tool 调用外部工具"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "cea93b9c-e158-4e12-9036-21a3c6485705",
   "metadata": {},
   "source": [
    "- 调用外部天气API"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "id": "bf4cbd7f-b004-4699-992b-f8573c75da34",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import requests\n",
    "import json\n",
    "from dotenv import load_dotenv  # FIX: load_dotenv is called below but was never imported in this cell\n",
    "from langchain_core.tools import tool\n",
    "load_dotenv(override=True)\n",
    "\n",
    "OPENWEATHER_API_KEY = os.getenv(\"OPENWEATHER_API_KEY\")\n",
    "\n",
    "@tool\n",
    "def get_weather(loc):\n",
    "    \"\"\"\n",
    "    查询即时天气函数。\n",
    "    :param loc: 字符串类型，查询城市的英文名。\n",
    "                注意：中国城市必须用英文名（例如 'Beijing' 表示北京）。\n",
    "    :return: 返回字符串形式的 JSON，内容为 OpenWeather API 返回的实时天气信息。\n",
    "             包括天气描述、温度、湿度、风速等数据。\n",
    "    \"\"\"\n",
    "\n",
    "    # Step 1. Build the request URL\n",
    "    url = \"https://api.openweathermap.org/data/2.5/weather\"\n",
    "    \n",
    "    # Step 2. Query parameters\n",
    "    params = {\n",
    "        \"q\": loc,\n",
    "        \"appid\": OPENWEATHER_API_KEY,  # reuse the key read once above instead of re-reading the env var\n",
    "        \"units\": \"metric\",  # Celsius; use \"imperial\" for Fahrenheit\n",
    "        \"lang\": \"zh_cn\"     # localize weather descriptions to Simplified Chinese\n",
    "    }\n",
    "    \n",
    "    # Step 3. Send the GET request\n",
    "    response = requests.get(url, params=params)\n",
    "\n",
    "    # Step 4. Parse the response body and return it as a JSON string\n",
    "    data = response.json()\n",
    "    return json.dumps(data)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 72,
   "id": "1d3bf11c-acfa-4ef0-abe1-58a4258aba8a",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "执行结果：\n",
      " {\"coord\": {\"lon\": 116.3972, \"lat\": 39.9075}, \"weather\": [{\"id\": 804, \"main\": \"Clouds\", \"description\": \"\\u9634\\uff0c\\u591a\\u4e91\", \"icon\": \"04n\"}], \"base\": \"stations\", \"main\": {\"temp\": 27.14, \"feels_like\": 31.49, \"temp_min\": 27.14, \"temp_max\": 27.14, \"pressure\": 999, \"humidity\": 93, \"sea_level\": 999, \"grnd_level\": 994}, \"visibility\": 9049, \"wind\": {\"speed\": 0.41, \"deg\": 67, \"gust\": 2.54}, \"clouds\": {\"all\": 100}, \"dt\": 1754152139, \"sys\": {\"country\": \"CN\", \"sunrise\": 1754169272, \"sunset\": 1754220397}, \"timezone\": 28800, \"id\": 1816670, \"name\": \"Beijing\", \"cod\": 200}\n"
     ]
    }
   ],
   "source": [
    "from langchain.chat_models import init_chat_model\n",
    "\n",
    "# Initialize the chat model (DEEPSEEK_API_KEY is defined in an earlier cell)\n",
    "model = init_chat_model(model=\"deepseek-chat\",model_provider=\"deepseek\",api_key=DEEPSEEK_API_KEY)\n",
    "# Register the weather tool with the LLM\n",
    "llm_with_tool = model.bind_tools([get_weather])\n",
    "\n",
    "# Chain: LLM tool call -> parse args for get_weather -> execute get_weather\n",
    "from langchain_core.output_parsers.openai_tools import JsonOutputKeyToolsParser\n",
    "parser = JsonOutputKeyToolsParser(key_name=get_weather.name, first_tool_only=True)\n",
    "\n",
    "tool_chain = llm_with_tool | parser | get_weather"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "acf7ce65-b1e2-42b7-96a8-25f3b77fb2ae",
   "metadata": {},
   "source": [
    "- 以上tool call得到了正确的Json响应，但是还需要转换为用户懂的形式"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 77,
   "id": "3a2e089d-aa7a-47f6-b18f-b22db40a7ad5",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "广州现在阴天多云，气温27.4℃，但体感闷热有32℃。夜间风力不大，稍微有点潮湿，能见度不错。\n"
     ]
    }
   ],
   "source": [
    "from langchain_core.output_parsers import StrOutputParser\n",
    "# 3. New: prompt template that rewrites raw weather JSON into natural language\n",
    "format_prompt = ChatPromptTemplate.from_template(\"\"\"\n",
    "请将以下天气数据转换为用户容易理解的自然语言：\n",
    "{weather_data}\n",
    "\n",
    "输出格式要求：\n",
    "1. 必须包含城市名、天气状况、气温、体感温度\n",
    "2. 语言简洁易懂，用中文口语化表达\n",
    "3. 不需要使用专业术语，例如将\"clear sky\"翻译为\"晴空万里\"\n",
    "\"\"\")\n",
    "\n",
    "# 4. Formatting chain (raw data -> prompt -> model -> string output)\n",
    "format_chain = format_prompt | model | StrOutputParser()\n",
    "\n",
    "# 5. Full pipeline: weather lookup -> human-friendly formatting.\n",
    "# NOTE(review): tool_chain emits a plain string, which fills the single\n",
    "# {weather_data} template variable — confirm this mapping holds.\n",
    "full_chain = tool_chain | format_chain\n",
    "\n",
    "# Run it\n",
    "result = full_chain.invoke(\"查询广州今天的天气，并说明时间\")\n",
    "print(result)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "55d1fbe6-404d-4213-befb-59b118bd912d",
   "metadata": {},
   "source": [
    "# 5.3 Langchain Agent"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c5bd75d5-f92a-4a7e-8827-460be43c0b99",
   "metadata": {},
   "source": [
    " - 创建Agent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 80,
   "id": "540f253d-442f-45f3-a21e-ebed8d0c51ae",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.agents import create_tool_calling_agent, tool\n",
    "from langchain.prompts import ChatPromptTemplate  # FIX: was ChatMessagePromptTemplate (unused); ChatPromptTemplate is what this cell uses\n",
    "\n",
    "\n",
    "tools = [get_weather]\n",
    "\n",
    "# Build the prompt template\n",
    "# 1. The prompt is one of the three required inputs (prompt, model, tools)\n",
    "# 2. \"placeholder\" marks a variable that will hold the agent's intermediate\n",
    "#    steps, tool-call history and tool results ({agent_scratchpad})\n",
    "prompt = ChatPromptTemplate.from_messages(\n",
    "    [\n",
    "        (\"system\", \"你是天气助手，请根据用户的问题，给出相应的天气信息\"),\n",
    "        (\"human\", \"{input}\"),\n",
    "        (\"placeholder\", \"{agent_scratchpad}\"),\n",
    "    ]\n",
    ")\n",
    "\n",
    "# Initialize the chat model\n",
    "model = init_chat_model(model=\"deepseek-chat\",model_provider=\"deepseek\",api_key=DEEPSEEK_API_KEY)\n",
    "\n",
    "# Create the agent directly with `create_tool_calling_agent`\n",
    "agent = create_tool_calling_agent(model, tools, prompt)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ba5baa5e-4827-46cb-bfe8-fc5814cb04b4",
   "metadata": {},
   "source": [
    "- 使用 AgentExecutor 来执行代理"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 82,
   "id": "18a1484b-8194-488b-a1a1-1744d6a852eb",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "\u001b[1m> Entering new AgentExecutor chain...\u001b[0m\n",
      "\u001b[32;1m\u001b[1;3m\n",
      "Invoking: `get_weather` with `{'loc': 'Guangzhou'}`\n",
      "\n",
      "\n",
      "\u001b[0m\u001b[36;1m\u001b[1;3m{\"coord\": {\"lon\": 113.25, \"lat\": 23.1167}, \"weather\": [{\"id\": 500, \"main\": \"Rain\", \"description\": \"\\u5c0f\\u96e8\", \"icon\": \"10n\"}], \"base\": \"stations\", \"main\": {\"temp\": 27.95, \"feels_like\": 33.59, \"temp_min\": 27.95, \"temp_max\": 27.95, \"pressure\": 1004, \"humidity\": 90, \"sea_level\": 1004, \"grnd_level\": 1003}, \"visibility\": 10000, \"wind\": {\"speed\": 3.01, \"deg\": 167, \"gust\": 8.06}, \"rain\": {\"1h\": 0.87}, \"clouds\": {\"all\": 100}, \"dt\": 1754232265, \"sys\": {\"country\": \"CN\", \"sunrise\": 1754171899, \"sunset\": 1754219280}, \"timezone\": 28800, \"id\": 1809858, \"name\": \"Guangzhou\", \"cod\": 200}\u001b[0m\u001b[32;1m\u001b[1;3m今天广州的天气是小雨，气温为27.95°C，体感温度较高，约为33.59°C。湿度为90%，风速为3.01米/秒，风向为167度。目前能见度为10000米，气压为1004百帕。请注意携带雨具出行！\u001b[0m\n",
      "\n",
      "\u001b[1m> Finished chain.\u001b[0m\n",
      "{'input': '请问今天广州的天气怎么样？', 'output': '今天广州的天气是小雨，气温为27.95°C，体感温度较高，约为33.59°C。湿度为90%，风速为3.01米/秒，风向为167度。目前能见度为10000米，气压为1004百帕。请注意携带雨具出行！'}\n"
     ]
    }
   ],
   "source": [
    "from langchain.agents import AgentExecutor\n",
    "\n",
    "# Wrap the agent in an executor; verbose=True prints the reasoning / tool-call trace\n",
    "agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)\n",
    "\n",
    "response = agent_executor.invoke({\"input\": \"请问今天广州的天气怎么样？\"})\n",
    "print(response)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "04b779fa-b9e4-4f8d-9f28-7df4f2f0d173",
   "metadata": {},
   "source": [
    "### 5.3.1 基于LangChain内置工具构造Agent"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "15446df7-f32f-4b34-a597-453ee547a9de",
   "metadata": {},
   "source": [
    "- 基于 Tavily Search API 的 Agent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 87,
   "id": "160c1b0b-9408-454c-86c1-9cf9d2643702",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: langchain-tavily in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (0.2.11)\n",
      "Requirement already satisfied: aiohttp<4.0.0,>=3.11.14 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-tavily) (3.12.14)\n",
      "Requirement already satisfied: langchain<0.4.0,>=0.3.20 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-tavily) (0.3.26)\n",
      "Requirement already satisfied: langchain-core<0.4.0,>=0.3.15 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-tavily) (0.3.68)\n",
      "Requirement already satisfied: requests<3.0.0,>=2.32.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-tavily) (2.32.4)\n",
      "Requirement already satisfied: aiohappyeyeballs>=2.5.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.11.14->langchain-tavily) (2.6.1)\n",
      "Requirement already satisfied: aiosignal>=1.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.11.14->langchain-tavily) (1.4.0)\n",
      "Requirement already satisfied: attrs>=17.3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.11.14->langchain-tavily) (24.3.0)\n",
      "Requirement already satisfied: frozenlist>=1.1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.11.14->langchain-tavily) (1.7.0)\n",
      "Requirement already satisfied: multidict<7.0,>=4.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.11.14->langchain-tavily) (6.6.3)\n",
      "Requirement already satisfied: propcache>=0.2.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.11.14->langchain-tavily) (0.3.2)\n",
      "Requirement already satisfied: yarl<2.0,>=1.17.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.11.14->langchain-tavily) (1.20.1)\n",
      "Requirement already satisfied: langchain-text-splitters<1.0.0,>=0.3.8 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<0.4.0,>=0.3.20->langchain-tavily) (0.3.8)\n",
      "Requirement already satisfied: langsmith>=0.1.17 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<0.4.0,>=0.3.20->langchain-tavily) (0.4.5)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.7.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<0.4.0,>=0.3.20->langchain-tavily) (2.11.7)\n",
      "Requirement already satisfied: SQLAlchemy<3,>=1.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<0.4.0,>=0.3.20->langchain-tavily) (2.0.41)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<0.4.0,>=0.3.20->langchain-tavily) (6.0.2)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-tavily) (9.1.2)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-tavily) (1.33)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-tavily) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<0.4.0,>=0.3.15->langchain-tavily) (4.12.2)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<0.4.0,>=0.3.15->langchain-tavily) (3.0.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<0.4.0,>=0.3.20->langchain-tavily) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.33.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<0.4.0,>=0.3.20->langchain-tavily) (2.33.2)\n",
      "Requirement already satisfied: typing-inspection>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<0.4.0,>=0.3.20->langchain-tavily) (0.4.1)\n",
      "Requirement already satisfied: charset_normalizer<4,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3.0.0,>=2.32.3->langchain-tavily) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3.0.0,>=2.32.3->langchain-tavily) (3.7)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3.0.0,>=2.32.3->langchain-tavily) (2.5.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3.0.0,>=2.32.3->langchain-tavily) (2025.6.15)\n",
      "Requirement already satisfied: greenlet>=1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from SQLAlchemy<3,>=1.4->langchain<0.4.0,>=0.3.20->langchain-tavily) (3.2.3)\n",
      "Requirement already satisfied: httpx<1,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (0.28.1)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (3.10.18)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (0.23.0)\n",
      "Requirement already satisfied: anyio in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (4.7.0)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (1.0.9)\n",
      "Requirement already satisfied: h11>=0.16 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (0.16.0)\n",
      "Requirement already satisfied: sniffio>=1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from anyio->httpx<1,>=0.23.0->langsmith>=0.1.17->langchain<0.4.0,>=0.3.20->langchain-tavily) (1.3.0)\n"
     ]
    }
   ],
   "source": [
    "# %pip (rather than !pip) installs into the environment of the running kernel\n",
    "%pip install -U langchain-tavily"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 91,
   "id": "e01191c5-5d1f-495c-b645-bde5fc48532e",
   "metadata": {
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "import os\n",
    "from dotenv import load_dotenv\n",
    "load_dotenv(override=True)\n",
    "from langchain_tavily import TavilySearch\n",
    "\n",
    "# Tavily web-search tool, capped at 2 results per query.\n",
    "# NOTE(review): presumably reads TAVILY_API_KEY from the environment loaded above — confirm.\n",
    "search_tool = TavilySearch(max_results=2)\n",
    "# search_tool.invoke(\"动画SonnyBoy\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "87257e13-e974-47ce-85ff-650fae565edf",
   "metadata": {},
   "source": [
    "- 创建Agent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 92,
   "id": "90577dcb-345d-43b1-a4e4-5212bba317e7",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "from langchain.agents import create_tool_calling_agent\n",
    "from langchain.chat_models import init_chat_model\n",
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "\n",
    "# Defined here so the cell survives Restart & Run All without relying on\n",
    "# variables leaked from earlier (possibly deleted) cells.\n",
    "DEEPSEEK_API_KEY = os.getenv(\"DEEPSEEK_API_KEY\")\n",
    "\n",
    "tools = [search_tool]\n",
    "\n",
    "# Build the prompt template.\n",
    "# 1. prompt is one of the required inputs (prompt, model, tools).\n",
    "# 2. \"placeholder\" marks a placeholder variable; its value typically carries the\n",
    "#    agent's intermediate reasoning steps, tool-call history and tool results.\n",
    "prompt = ChatPromptTemplate.from_messages(\n",
    "    [\n",
    "        (\"system\", \"你是助手，可以调用工具网络搜索，获取实时信息\"),\n",
    "        (\"human\", \"{input}\"),\n",
    "        (\"placeholder\", \"{agent_scratchpad}\"),\n",
    "    ]\n",
    ")\n",
    "\n",
    "# Initialize the chat model\n",
    "model = init_chat_model(model=\"deepseek-chat\", model_provider=\"deepseek\", api_key=DEEPSEEK_API_KEY)\n",
    "\n",
    "# Create the agent directly with `create_tool_calling_agent`\n",
    "agent = create_tool_calling_agent(model, tools, prompt)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "22a37c3e-fb60-40bb-aae0-3b347f8c88f7",
   "metadata": {},
   "source": [
    "- 执行 Agent"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 95,
   "id": "ca8dd963-7f7c-46ee-b252-0286c296c2a6",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "{'input': '查询官网，2025水星奖入围作品公布时间？', 'output': '根据水星奖（Mercury Prize）官网的信息，2025年水星奖的入围作品（12张“年度专辑”）将于**2025年9月10日（星期三）**公布。更多详情可以访问官网：[Mercury Prize 2025](https://www.mercuryprize.com/news/2025-mercury-prize-moves-to-newcastle-for-first-time)。'}\n"
     ]
    }
   ],
   "source": [
    "from langchain.agents import AgentExecutor\n",
    "\n",
    "# Wrap the agent in an executor that drives the tool-calling loop\n",
    "agent_executor = AgentExecutor(agent=agent, tools=tools)\n",
    "\n",
    "response = agent_executor.invoke({\"input\": \"查询官网，2025水星奖入围作品公布时间？\"})\n",
    "print(response)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "76d2234c-7d87-43e9-84df-6b7e404e65fb",
   "metadata": {},
   "source": [
    "# 6.实战案例"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f7a6c0ec-4b15-4970-92db-31def06417a6",
   "metadata": {},
   "source": [
    "### 6.1浏览器自动化"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 96,
   "id": "13e48c01-12d6-42ba-adca-25629e011ced",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting playwright\n",
      "  Downloading playwright-1.54.0-py3-none-win_amd64.whl.metadata (3.5 kB)\n",
      "Collecting lxml\n",
      "  Downloading lxml-6.0.0-cp313-cp313-win_amd64.whl.metadata (6.8 kB)\n",
      "Requirement already satisfied: langchain_community in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (0.3.27)\n",
      "Requirement already satisfied: beautifulsoup4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (4.12.3)\n",
      "Collecting reportlab\n",
      "  Downloading reportlab-4.4.3-py3-none-any.whl.metadata (1.7 kB)\n",
      "Collecting pyee<14,>=13 (from playwright)\n",
      "  Downloading pyee-13.0.0-py3-none-any.whl.metadata (2.9 kB)\n",
      "Requirement already satisfied: greenlet<4.0.0,>=3.1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from playwright) (3.2.3)\n",
      "Requirement already satisfied: typing-extensions in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pyee<14,>=13->playwright) (4.12.2)\n",
      "Requirement already satisfied: langchain-core<1.0.0,>=0.3.66 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (0.3.68)\n",
      "Requirement already satisfied: langchain<1.0.0,>=0.3.26 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (0.3.26)\n",
      "Requirement already satisfied: SQLAlchemy<3,>=1.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (2.0.41)\n",
      "Requirement already satisfied: requests<3,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (2.32.4)\n",
      "Requirement already satisfied: PyYAML>=5.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (6.0.2)\n",
      "Requirement already satisfied: aiohttp<4.0.0,>=3.8.3 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (3.12.14)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10,>=8.1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (9.1.2)\n",
      "Requirement already satisfied: dataclasses-json<0.7,>=0.5.7 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (0.6.7)\n",
      "Requirement already satisfied: pydantic-settings<3.0.0,>=2.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (2.10.1)\n",
      "Requirement already satisfied: langsmith>=0.1.125 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (0.4.5)\n",
      "Requirement already satisfied: httpx-sse<1.0.0,>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (0.4.1)\n",
      "Requirement already satisfied: numpy>=2.1.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain_community) (2.3.1)\n",
      "Requirement already satisfied: aiohappyeyeballs>=2.5.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain_community) (2.6.1)\n",
      "Requirement already satisfied: aiosignal>=1.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain_community) (1.4.0)\n",
      "Requirement already satisfied: attrs>=17.3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain_community) (24.3.0)\n",
      "Requirement already satisfied: frozenlist>=1.1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain_community) (1.7.0)\n",
      "Requirement already satisfied: multidict<7.0,>=4.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain_community) (6.6.3)\n",
      "Requirement already satisfied: propcache>=0.2.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain_community) (0.3.2)\n",
      "Requirement already satisfied: yarl<2.0,>=1.17.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from aiohttp<4.0.0,>=3.8.3->langchain_community) (1.20.1)\n",
      "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from dataclasses-json<0.7,>=0.5.7->langchain_community) (3.26.1)\n",
      "Requirement already satisfied: typing-inspect<1,>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from dataclasses-json<0.7,>=0.5.7->langchain_community) (0.9.0)\n",
      "Requirement already satisfied: langchain-text-splitters<1.0.0,>=0.3.8 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<1.0.0,>=0.3.26->langchain_community) (0.3.8)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.7.4 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain<1.0.0,>=0.3.26->langchain_community) (2.11.7)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain_community) (1.33)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.66->langchain_community) (24.2)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.66->langchain_community) (3.0.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<1.0.0,>=0.3.26->langchain_community) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.33.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<1.0.0,>=0.3.26->langchain_community) (2.33.2)\n",
      "Requirement already satisfied: typing-inspection>=0.4.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain<1.0.0,>=0.3.26->langchain_community) (0.4.1)\n",
      "Requirement already satisfied: python-dotenv>=0.21.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from pydantic-settings<3.0.0,>=2.4.0->langchain_community) (1.1.1)\n",
      "Requirement already satisfied: charset_normalizer<4,>=2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain_community) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain_community) (3.7)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain_community) (2.5.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from requests<3,>=2->langchain_community) (2025.6.15)\n",
      "Requirement already satisfied: mypy-extensions>=0.3.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from typing-inspect<1,>=0.4.0->dataclasses-json<0.7,>=0.5.7->langchain_community) (1.1.0)\n",
      "Requirement already satisfied: soupsieve>1.2 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from beautifulsoup4) (2.5)\n",
      "Requirement already satisfied: pillow>=9.0.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from reportlab) (11.3.0)\n",
      "Requirement already satisfied: httpx<1,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain_community) (0.28.1)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain_community) (3.10.18)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain_community) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from langsmith>=0.1.125->langchain_community) (0.23.0)\n",
      "Requirement already satisfied: anyio in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.1.125->langchain_community) (4.7.0)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith>=0.1.125->langchain_community) (1.0.9)\n",
      "Requirement already satisfied: h11>=0.16 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith>=0.1.125->langchain_community) (0.16.0)\n",
      "Requirement already satisfied: sniffio>=1.1 in d:\\python_dev_tools\\miniconda3\\envs\\jupyter_env\\lib\\site-packages (from anyio->httpx<1,>=0.23.0->langsmith>=0.1.125->langchain_community) (1.3.0)\n",
      "Downloading playwright-1.54.0-py3-none-win_amd64.whl (35.5 MB)\n",
      "   ---------------------------------------- 0.0/35.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.3/35.5 MB ? eta -:--:--\n",
      "   - -------------------------------------- 1.6/35.5 MB 4.4 MB/s eta 0:00:08\n",
      "   --- ------------------------------------ 2.9/35.5 MB 5.2 MB/s eta 0:00:07\n",
      "   ---- ----------------------------------- 4.2/35.5 MB 5.7 MB/s eta 0:00:06\n",
      "   ------ --------------------------------- 5.8/35.5 MB 5.9 MB/s eta 0:00:06\n",
      "   ------- -------------------------------- 7.1/35.5 MB 6.1 MB/s eta 0:00:05\n",
      "   --------- ------------------------------ 8.7/35.5 MB 6.4 MB/s eta 0:00:05\n",
      "   ----------- ---------------------------- 10.5/35.5 MB 6.6 MB/s eta 0:00:04\n",
      "   ------------- -------------------------- 12.1/35.5 MB 6.7 MB/s eta 0:00:04\n",
      "   --------------- ------------------------ 13.6/35.5 MB 6.8 MB/s eta 0:00:04\n",
      "   ----------------- ---------------------- 15.2/35.5 MB 6.8 MB/s eta 0:00:03\n",
      "   ------------------ --------------------- 16.8/35.5 MB 6.9 MB/s eta 0:00:03\n",
      "   -------------------- ------------------- 18.4/35.5 MB 6.9 MB/s eta 0:00:03\n",
      "   ---------------------- ----------------- 19.7/35.5 MB 6.8 MB/s eta 0:00:03\n",
      "   ----------------------- ---------------- 21.2/35.5 MB 6.9 MB/s eta 0:00:03\n",
      "   ------------------------- -------------- 22.5/35.5 MB 6.9 MB/s eta 0:00:02\n",
      "   --------------------------- ------------ 24.1/35.5 MB 6.9 MB/s eta 0:00:02\n",
      "   ---------------------------- ----------- 25.4/35.5 MB 6.9 MB/s eta 0:00:02\n",
      "   ------------------------------ --------- 27.0/35.5 MB 6.9 MB/s eta 0:00:02\n",
      "   ------------------------------- -------- 28.3/35.5 MB 6.9 MB/s eta 0:00:02\n",
      "   --------------------------------- ------ 29.9/35.5 MB 6.9 MB/s eta 0:00:01\n",
      "   ----------------------------------- ---- 31.2/35.5 MB 6.8 MB/s eta 0:00:01\n",
      "   ------------------------------------ --- 32.5/35.5 MB 6.8 MB/s eta 0:00:01\n",
      "   -------------------------------------- - 34.1/35.5 MB 6.8 MB/s eta 0:00:01\n",
      "   ---------------------------------------  35.4/35.5 MB 6.8 MB/s eta 0:00:01\n",
      "   ---------------------------------------- 35.5/35.5 MB 6.7 MB/s eta 0:00:00\n",
      "Downloading pyee-13.0.0-py3-none-any.whl (15 kB)\n",
      "Downloading lxml-6.0.0-cp313-cp313-win_amd64.whl (4.0 MB)\n",
      "   ---------------------------------------- 0.0/4.0 MB ? eta -:--:--\n",
      "   ---------- ----------------------------- 1.0/4.0 MB 5.9 MB/s eta 0:00:01\n",
      "   -------------------- ------------------- 2.1/4.0 MB 5.6 MB/s eta 0:00:01\n",
      "   ---------------------------------------  3.9/4.0 MB 6.6 MB/s eta 0:00:01\n",
      "   ---------------------------------------- 4.0/4.0 MB 6.3 MB/s eta 0:00:00\n",
      "Downloading reportlab-4.4.3-py3-none-any.whl (2.0 MB)\n",
      "   ---------------------------------------- 0.0/2.0 MB ? eta -:--:--\n",
      "   -------------------------- ------------- 1.3/2.0 MB 6.9 MB/s eta 0:00:01\n",
      "   ---------------------------------------- 2.0/2.0 MB 6.9 MB/s eta 0:00:00\n",
      "Installing collected packages: reportlab, pyee, lxml, playwright\n",
      "\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------------------------------------- 0/4 [reportlab]\n",
      "   ---------- ----------------------------- 1/4 [pyee]\n",
      "   -------------------- ------------------- 2/4 [lxml]\n",
      "   -------------------- ------------------- 2/4 [lxml]\n",
      "   -------------------- ------------------- 2/4 [lxml]\n",
      "   -------------------- ------------------- 2/4 [lxml]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ------------------------------ --------- 3/4 [playwright]\n",
      "   ---------------------------------------- 4/4 [playwright]\n",
      "\n",
      "Successfully installed lxml-6.0.0 playwright-1.54.0 pyee-13.0.0 reportlab-4.4.3\n"
     ]
    }
   ],
   "source": [
    "%pip install playwright lxml langchain_community beautifulsoup4 reportlab"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "e4b3f5f7-164d-41bd-a2e3-929b749e493d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "=== 创建串行链：网站总结 → PDF生成 ===\n",
      "选择处理方式:\n",
      "1. 简单串行链（直接总结 → PDF）\n",
      "2. 优化串行链（总结 → 优化 → PDF）\n"
     ]
    },
    {
     "name": "stdin",
     "output_type": "stream",
     "text": [
      "请选择 (1/2):  2\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "============================================================\n",
      "🤖 网站内容PDF生成器\n",
      "============================================================\n",
      "\n",
      "🔄 开始处理URL (优化版): https://github.com/fufankeji/MateGen/blob/main/README_zh.md\n",
      "📝 步骤1: 网站总结...\n",
      "🎨 步骤2: 内容优化...\n",
      "📄 步骤3: 生成PDF...\n",
      "✅ 成功注册中文字体: C:/Windows/Fonts/simhei.ttf\n",
      "⚠️ 段落处理失败: | **修复方案**     | 方案1：改用 Async API（推荐）<br> 方案2：将同步代...\n",
      "⚠️ 段落处理失败: | **代码示例**     | ```python<br>async with async_pla...\n",
      "📄 PDF文件生成完成: D:\\python_dev_tools\\langchain\\website_summary_20250805_232241.pdf\n",
      "✅ 完成: PDF文件已成功生成: D:\\python_dev_tools\\langchain\\website_summary_20250805_232241.pdf\n",
      "\n",
      "============================================================\n",
      "🎉 任务完成！\n",
      "============================================================\n"
     ]
    }
   ],
   "source": [
    "from langchain_community.agent_toolkits import PlayWrightBrowserToolkit\n",
    "from langchain_community.tools.playwright.utils import create_sync_playwright_browser\n",
    "from langchain import hub\n",
    "from langchain.agents import AgentExecutor, create_openai_tools_agent\n",
    "from langchain.chat_models import init_chat_model\n",
    "from langchain_core.tools import tool\n",
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "from langchain_core.output_parsers import StrOutputParser\n",
    "from reportlab.lib.pagesizes import letter, A4\n",
    "from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer\n",
    "from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle\n",
    "from reportlab.lib.enums import TA_JUSTIFY, TA_CENTER\n",
    "from reportlab.pdfbase import pdfmetrics\n",
    "from reportlab.pdfbase.ttfonts import TTFont\n",
    "import os\n",
    "from datetime import datetime\n",
    "from dotenv import load_dotenv\n",
    "load_dotenv(override=True)\n",
    "\n",
    "# NOTE(review): init_chat_model presumably reads DEEPSEEK_API_KEY from the\n",
    "# environment loaded above; this explicit copy is kept for visibility only.\n",
    "DeepSeek_API_KEY = os.getenv(\"DEEPSEEK_API_KEY\")\n",
    "\n",
    "# pip install langchain playwright langchain_community langchain-deepseek lxml langchainhub beautifulsoup4 reportlab\n",
    "\n",
    "# 1. Tool: visit a website and summarize its content\n",
    "@tool\n",
    "def summarize_website(url: str) -> str:\n",
    "    \"\"\"访问指定网站并返回内容总结\"\"\"\n",
    "    sync_browser = None\n",
    "    try:\n",
    "        # Create a browser instance and derive the Playwright tool set from it\n",
    "        sync_browser = create_sync_playwright_browser()\n",
    "        toolkit = PlayWrightBrowserToolkit.from_browser(sync_browser=sync_browser)\n",
    "        tools = toolkit.get_tools()\n",
    "        \n",
    "        # Initialize the model and the agent\n",
    "        model = init_chat_model(\"deepseek-chat\", model_provider=\"deepseek\")\n",
    "        prompt = hub.pull(\"hwchase17/openai-tools-agent\")\n",
    "        agent = create_openai_tools_agent(model, tools, prompt)\n",
    "        agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=False)\n",
    "        \n",
    "        # Run the summarization task\n",
    "        command = {\n",
    "            \"input\": f\"访问这个网站 {url} 并帮我详细总结一下这个网站的内容，包括主要功能、特点和使用方法\"\n",
    "        }\n",
    "        \n",
    "        result = agent_executor.invoke(command)\n",
    "        return result.get(\"output\", \"无法获取网站内容总结\")\n",
    "        \n",
    "    except Exception as e:\n",
    "        return f\"网站访问失败: {str(e)}\"\n",
    "    finally:\n",
    "        # Close the browser so repeated calls do not leak Playwright instances\n",
    "        if sync_browser is not None:\n",
    "            try:\n",
    "                sync_browser.close()\n",
    "            except Exception:\n",
    "                pass\n",
    "\n",
    "# 2. Tool: render text content into a PDF file\n",
    "@tool  \n",
    "def generate_pdf(content: str) -> str:\n",
    "    \"\"\"将文本内容生成为PDF文件\"\"\"\n",
    "    try:\n",
    "        # Timestamped filename so repeated runs do not overwrite each other\n",
    "        timestamp = datetime.now().strftime(\"%Y%m%d_%H%M%S\")\n",
    "        filename = f\"website_summary_{timestamp}.pdf\"\n",
    "        \n",
    "        # Create the PDF document\n",
    "        doc = SimpleDocTemplate(filename, pagesize=A4)\n",
    "        styles = getSampleStyleSheet()\n",
    "        \n",
    "        # Register a Chinese font if the system provides one. The flag is\n",
    "        # initialized up front so the style definitions below never reference\n",
    "        # an undefined name (the original used a fragile locals() check).\n",
    "        chinese_font_registered = False\n",
    "        try:\n",
    "            # Windows system font paths\n",
    "            font_paths = [\n",
    "                \"C:/Windows/Fonts/simhei.ttf\",  # SimHei\n",
    "                \"C:/Windows/Fonts/simsun.ttc\",  # SimSun\n",
    "                \"C:/Windows/Fonts/msyh.ttc\",    # Microsoft YaHei\n",
    "            ]\n",
    "            \n",
    "            for font_path in font_paths:\n",
    "                if os.path.exists(font_path):\n",
    "                    try:\n",
    "                        pdfmetrics.registerFont(TTFont('ChineseFont', font_path))\n",
    "                        chinese_font_registered = True\n",
    "                        print(f\"✅ 成功注册中文字体: {font_path}\")\n",
    "                        break\n",
    "                    except Exception:\n",
    "                        # Best-effort: try the next candidate font\n",
    "                        continue\n",
    "                        \n",
    "            if not chinese_font_registered:\n",
    "                print(\"⚠️ 未找到中文字体，使用默认字体\")\n",
    "                \n",
    "        except Exception as e:\n",
    "            print(f\"⚠️ 字体注册失败: {e}\")\n",
    "        \n",
    "        # Custom styles — use the Chinese font when registration succeeded\n",
    "        title_style = ParagraphStyle(\n",
    "            'CustomTitle',\n",
    "            parent=styles['Heading1'],\n",
    "            fontSize=18,\n",
    "            alignment=TA_CENTER,\n",
    "            spaceAfter=30,\n",
    "            fontName='ChineseFont' if chinese_font_registered else 'Helvetica-Bold'\n",
    "        )\n",
    "        \n",
    "        content_style = ParagraphStyle(\n",
    "            'CustomContent',\n",
    "            parent=styles['Normal'],\n",
    "            fontSize=11,\n",
    "            alignment=TA_JUSTIFY,\n",
    "            leftIndent=20,\n",
    "            rightIndent=20,\n",
    "            spaceAfter=12,\n",
    "            fontName='ChineseFont' if chinese_font_registered else 'Helvetica'\n",
    "        )\n",
    "        \n",
    "        # Build the PDF story (list of flowables)\n",
    "        story = []\n",
    "        \n",
    "        # Title\n",
    "        story.append(Paragraph(\"网站内容总结报告\", title_style))\n",
    "        story.append(Spacer(1, 20))\n",
    "        \n",
    "        # Generation time\n",
    "        time_text = f\"生成时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\"\n",
    "        story.append(Paragraph(time_text, styles['Normal']))\n",
    "        story.append(Spacer(1, 20))\n",
    "        \n",
    "        # Separator line\n",
    "        story.append(Paragraph(\"=\" * 50, styles['Normal']))\n",
    "        story.append(Spacer(1, 15))\n",
    "        \n",
    "        # Main content — tolerant paragraph-by-paragraph processing\n",
    "        if content:\n",
    "            # Normalize line endings before splitting into paragraphs\n",
    "            content = content.replace('\\r\\n', '\\n').replace('\\r', '\\n')\n",
    "            paragraphs = content.split('\\n')\n",
    "            \n",
    "            for para in paragraphs:\n",
    "                if para.strip():\n",
    "                    # Clean the paragraph so the PDF can render it correctly\n",
    "                    clean_para = para.strip()\n",
    "                    # Decode HTML entities\n",
    "                    clean_para = clean_para.replace('&lt;', '<').replace('&gt;', '>').replace('&amp;', '&')\n",
    "                    \n",
    "                    try:\n",
    "                        story.append(Paragraph(clean_para, content_style))\n",
    "                        story.append(Spacer(1, 8))\n",
    "                    except Exception:\n",
    "                        # Paragraph failed — retry with a default-font style\n",
    "                        try:\n",
    "                            fallback_style = ParagraphStyle(\n",
    "                                'Fallback',\n",
    "                                parent=styles['Normal'],\n",
    "                                fontSize=10,\n",
    "                                leftIndent=20,\n",
    "                                rightIndent=20,\n",
    "                                spaceAfter=10\n",
    "                            )\n",
    "                            story.append(Paragraph(clean_para, fallback_style))\n",
    "                            story.append(Spacer(1, 8))\n",
    "                        except Exception:\n",
    "                            # Still failing — log and skip this paragraph\n",
    "                            print(f\"⚠️ 段落处理失败: {clean_para[:50]}...\")\n",
    "                            continue\n",
    "        else:\n",
    "            story.append(Paragraph(\"暂无内容\", content_style))\n",
    "        \n",
    "        # Footer\n",
    "        story.append(Spacer(1, 30))\n",
    "        story.append(Paragraph(\"=\" * 50, styles['Normal']))\n",
    "        story.append(Paragraph(\"本报告由 Playwright PDF Agent 自动生成\", styles['Italic']))\n",
    "        \n",
    "        # Render the PDF to disk\n",
    "        doc.build(story)\n",
    "        \n",
    "        # Report the absolute output path\n",
    "        abs_path = os.path.abspath(filename)\n",
    "        print(f\"📄 PDF文件生成完成: {abs_path}\")\n",
    "        return f\"PDF文件已成功生成: {abs_path}\"\n",
    "        \n",
    "    except Exception as e:\n",
    "        error_msg = f\"PDF生成失败: {str(e)}\"\n",
    "        print(error_msg)\n",
    "        return error_msg\n",
    "\n",
    "# 3. Build the sequential chains\n",
    "print(\"=== 创建串行链：网站总结 → PDF生成 ===\")\n",
    "\n",
    "# Option 1: simple chain — summary output piped straight into PDF generation\n",
    "simple_chain = summarize_website | generate_pdf\n",
    "\n",
    "# Option 2: chain with an LLM rewrite step between summary and PDF\n",
    "optimization_prompt = ChatPromptTemplate.from_template(\n",
    "    \"\"\"请优化以下网站总结内容，使其更适合PDF报告格式：\n",
    "\n",
    "原始总结：\n",
    "{summary}\n",
    "\n",
    "请重新组织内容，包括：\n",
    "1. 清晰的标题和结构\n",
    "2. 要点总结\n",
    "3. 详细说明\n",
    "4. 使用要求等\n",
    "\n",
    "优化后的内容：\"\"\"\n",
    ")\n",
    "\n",
    "model = init_chat_model(\"deepseek-chat\", model_provider=\"deepseek\")\n",
    "\n",
    "# Optimized chain: website summary → LLM optimization → PDF generation.\n",
    "# The lambda wraps the plain-string summary into the dict the prompt expects.\n",
    "optimized_chain = (\n",
    "    summarize_website \n",
    "    | (lambda summary: {\"summary\": summary})\n",
    "    | optimization_prompt \n",
    "    | model \n",
    "    | StrOutputParser() \n",
    "    | generate_pdf\n",
    ")\n",
    "\n",
    "# 4. Test helpers — run each chain end-to-end against a single URL\n",
    "def test_simple_chain(url: str):\n",
    "    \"\"\"Run the simple sequential chain (summary -> PDF) for one URL.\"\"\"\n",
    "    print(f\"\\n🔄 开始处理URL: {url}\")\n",
    "    print(\"📝 步骤1: 网站总结...\")\n",
    "    print(\"📄 步骤2: 生成PDF...\")\n",
    "    \n",
    "    result = simple_chain.invoke(url)\n",
    "    print(f\"✅ 完成: {result}\")\n",
    "    return result\n",
    "\n",
    "def test_optimized_chain(url: str):\n",
    "    \"\"\"Run the optimized chain (summary -> LLM rewrite -> PDF) for one URL.\"\"\"\n",
    "    print(f\"\\n🔄 开始处理URL (优化版): {url}\")\n",
    "    print(\"📝 步骤1: 网站总结...\")\n",
    "    print(\"🎨 步骤2: 内容优化...\")\n",
    "    print(\"📄 步骤3: 生成PDF...\")\n",
    "    \n",
    "    result = optimized_chain.invoke(url)\n",
    "    print(f\"✅ 完成: {result}\")\n",
    "    return result\n",
    "\n",
    "# 5. Interactive entry point\n",
    "def create_website_pdf_report(url: str, use_optimization: bool = True):\n",
    "    \"\"\"Main driver: summarize `url` and produce a PDF report.\n",
    "\n",
    "    use_optimization selects the optimized chain (True) or the simple one.\n",
    "    Returns the chain result string, or an error message on failure.\n",
    "    \"\"\"\n",
    "    print(\"=\" * 60)\n",
    "    print(\"🤖 网站内容PDF生成器\")\n",
    "    print(\"=\" * 60)\n",
    "    \n",
    "    try:\n",
    "        if use_optimization:\n",
    "            result = test_optimized_chain(url)\n",
    "        else:\n",
    "            result = test_simple_chain(url)\n",
    "            \n",
    "        print(\"\\n\" + \"=\" * 60)\n",
    "        print(\"🎉 任务完成！\")\n",
    "        print(\"=\" * 60)\n",
    "        return result\n",
    "        \n",
    "    except Exception as e:\n",
    "        error_msg = f\"❌ 处理失败: {str(e)}\"\n",
    "        print(error_msg)\n",
    "        return error_msg\n",
    "\n",
    "# 6. Program entry point (interactive: asks which chain to run)\n",
    "if __name__ == \"__main__\":\n",
    "    # Test URL\n",
    "    test_url = \"https://github.com/fufankeji/MateGen/blob/main/README_zh.md\"\n",
    "    \n",
    "    print(\"选择处理方式:\")\n",
    "    print(\"1. 简单串行链（直接总结 → PDF）\")\n",
    "    print(\"2. 优化串行链（总结 → 优化 → PDF）\")\n",
    "    \n",
    "    choice = input(\"请选择 (1/2): \").strip()\n",
    "    \n",
    "    if choice == \"1\":\n",
    "        create_website_pdf_report(test_url, use_optimization=False)\n",
    "    elif choice == \"2\":\n",
    "        create_website_pdf_report(test_url, use_optimization=True)\n",
    "    else:\n",
    "        # Any other input falls back to the optimized chain\n",
    "        print(\"使用默认优化模式...\")\n",
    "        create_website_pdf_report(test_url, use_optimization=True) "
   ]
  },
  {
   "cell_type": "markdown",
   "id": "52c3f218-637c-4a5d-9373-c4c3fb7a85b0",
   "metadata": {},
   "source": [
    "# 7.Langchain + MCP"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "316aaf04-c3b5-4956-8942-7149385a50ef",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.13.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
