{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: http://mirrors.aliyun.com/pypi/simple/\n",
      "Collecting langchain\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/18/7d/0f4cc3317634195381f87c5d90268f29b9a31fda62aa7a7f36a1c27b06f3/langchain-0.3.19-py3-none-any.whl (1.0 MB)\n",
      "     ---------------------------------------- 0.0/1.0 MB ? eta -:--:--\n",
      "     ---------------------------------------- 1.0/1.0 MB 9.6 MB/s eta 0:00:00\n",
      "Collecting langchain-core<1.0.0,>=0.3.35 (from langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/fd/3f/f14983390600c25a98000f2a49c8f98db412bde30433b87f7decab6034e7/langchain_core-0.3.39-py3-none-any.whl (414 kB)\n",
      "Collecting langchain-text-splitters<1.0.0,>=0.3.6 (from langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/4c/f8/6b82af988e65af9697f6a2f25373fb173fd32d48b62772a8773c5184c870/langchain_text_splitters-0.3.6-py3-none-any.whl (31 kB)\n",
      "Collecting langsmith<0.4,>=0.1.17 (from langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/ff/68/514ffa62860202a5a0a3acbf5c05017ef9df38d4437d2cb44a3cf93d617b/langsmith-0.3.11-py3-none-any.whl (335 kB)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.7.4 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain) (2.10.6)\n",
      "Collecting SQLAlchemy<3,>=1.4 (from langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/a4/17/364a99c8c5698492c7fa40fc463bf388f05b0b03b74028828b71a79dc89d/SQLAlchemy-2.0.38-cp311-cp311-win_amd64.whl (2.1 MB)\n",
      "     ---------------------------------------- 0.0/2.1 MB ? eta -:--:--\n",
      "     ---------------------------------------  2.1/2.1 MB 11.8 MB/s eta 0:00:01\n",
      "     ---------------------------------------- 2.1/2.1 MB 11.8 MB/s eta 0:00:00\n",
      "Requirement already satisfied: requests<3,>=2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain) (2.32.3)\n",
      "Requirement already satisfied: PyYAML>=5.3 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain) (6.0.2)\n",
      "Collecting aiohttp<4.0.0,>=3.8.3 (from langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/29/23/d98d491ca073ee92cc6a741be97b6b097fb06dacc5f95c0c9350787db549/aiohttp-3.11.13-cp311-cp311-win_amd64.whl (442 kB)\n",
      "Collecting tenacity!=8.4.0,<10,>=8.1.0 (from langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl (28 kB)\n",
      "Collecting numpy<2,>=1.26.4 (from langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl (15.8 MB)\n",
      "     ---------------------------------------- 0.0/15.8 MB ? eta -:--:--\n",
      "     --- ------------------------------------ 1.6/15.8 MB 7.6 MB/s eta 0:00:02\n",
      "     --------- ------------------------------ 3.9/15.8 MB 10.2 MB/s eta 0:00:02\n",
      "     --------------- ------------------------ 6.3/15.8 MB 10.4 MB/s eta 0:00:01\n",
      "     --------------------- ------------------ 8.7/15.8 MB 11.0 MB/s eta 0:00:01\n",
      "     --------------------------- ----------- 11.0/15.8 MB 11.1 MB/s eta 0:00:01\n",
      "     --------------------------------- ----- 13.6/15.8 MB 11.3 MB/s eta 0:00:01\n",
      "     --------------------------------------  15.7/15.8 MB 11.4 MB/s eta 0:00:01\n",
      "     --------------------------------------- 15.8/15.8 MB 11.2 MB/s eta 0:00:00\n",
      "Collecting aiohappyeyeballs>=2.3.0 (from aiohttp<4.0.0,>=3.8.3->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/44/4c/03fb05f56551828ec67ceb3665e5dc51638042d204983a03b0a1541475b6/aiohappyeyeballs-2.4.6-py3-none-any.whl (14 kB)\n",
      "Collecting aiosignal>=1.1.2 (from aiohttp<4.0.0,>=3.8.3->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/ec/6a/bc7e17a3e87a2985d3e8f4da4cd0f481060eb78fb08596c42be62c90a4d9/aiosignal-1.3.2-py2.py3-none-any.whl (7.6 kB)\n",
      "Collecting attrs>=17.3.0 (from aiohttp<4.0.0,>=3.8.3->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/fc/30/d4986a882011f9df997a55e6becd864812ccfcd821d64aac8570ee39f719/attrs-25.1.0-py3-none-any.whl (63 kB)\n",
      "Collecting frozenlist>=1.1.1 (from aiohttp<4.0.0,>=3.8.3->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/ca/8c/2ddffeb8b60a4bce3b196c32fcc30d8830d4615e7b492ec2071da801b8ad/frozenlist-1.5.0-cp311-cp311-win_amd64.whl (51 kB)\n",
      "Collecting multidict<7.0,>=4.5 (from aiohttp<4.0.0,>=3.8.3->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/9f/0b/ad879847ecbf6d27e90a6eabb7eff6b62c129eefe617ea45eae7c1f0aead/multidict-6.1.0-cp311-cp311-win_amd64.whl (28 kB)\n",
      "Collecting propcache>=0.2.0 (from aiohttp<4.0.0,>=3.8.3->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/d8/35/57abeb6146fe3c19081eeaf3d9d4cfea256f87f1e5101acf80d3332c1820/propcache-0.3.0-cp311-cp311-win_amd64.whl (44 kB)\n",
      "Collecting yarl<2.0,>=1.17.0 (from aiohttp<4.0.0,>=3.8.3->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/ae/7b/8600250b3d89b625f1121d897062f629883c2f45339623b69b1747ec65fa/yarl-1.18.3-cp311-cp311-win_amd64.whl (91 kB)\n",
      "Collecting jsonpatch<2.0,>=1.33 (from langchain-core<1.0.0,>=0.3.35->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl (12 kB)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.35->langchain) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.35->langchain) (4.12.2)\n",
      "Requirement already satisfied: httpx<1,>=0.23.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.17->langchain) (0.28.1)\n",
      "Collecting orjson<4.0.0,>=3.9.14 (from langsmith<0.4,>=0.1.17->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/00/f8/bb60a4644287a544ec81df1699d5b965776bc9848d9029d9f9b3402ac8bb/orjson-3.10.15-cp311-cp311-win_amd64.whl (133 kB)\n",
      "Collecting requests-toolbelt<2.0.0,>=1.0.0 (from langsmith<0.4,>=0.1.17->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl (54 kB)\n",
      "Collecting zstandard<0.24.0,>=0.23.0 (from langsmith<0.4,>=0.1.17->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl (495 kB)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.27.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.7.4->langchain) (2.27.2)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langchain) (3.4.1)\n",
      "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langchain) (3.10)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langchain) (2.3.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langchain) (2025.1.31)\n",
      "Collecting greenlet!=0.4.17 (from SQLAlchemy<3,>=1.4->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl (298 kB)\n",
      "Requirement already satisfied: anyio in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.17->langchain) (4.8.0)\n",
      "Requirement already satisfied: httpcore==1.* in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx<1,>=0.23.0->langsmith<0.4,>=0.1.17->langchain) (1.0.7)\n",
      "Requirement already satisfied: h11<0.15,>=0.13 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.17->langchain) (0.14.0)\n",
      "Collecting jsonpointer>=1.9 (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.35->langchain)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl (7.6 kB)\n",
      "Requirement already satisfied: sniffio>=1.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from anyio->httpx<1,>=0.23.0->langsmith<0.4,>=0.1.17->langchain) (1.3.1)\n",
      "Installing collected packages: zstandard, tenacity, propcache, orjson, numpy, multidict, jsonpointer, greenlet, frozenlist, attrs, aiohappyeyeballs, yarl, SQLAlchemy, requests-toolbelt, jsonpatch, aiosignal, langsmith, aiohttp, langchain-core, langchain-text-splitters, langchain\n",
      "Successfully installed SQLAlchemy-2.0.38 aiohappyeyeballs-2.4.6 aiohttp-3.11.13 aiosignal-1.3.2 attrs-25.1.0 frozenlist-1.5.0 greenlet-3.1.1 jsonpatch-1.33 jsonpointer-3.0.0 langchain-0.3.19 langchain-core-0.3.39 langchain-text-splitters-0.3.6 langsmith-0.3.11 multidict-6.1.0 numpy-1.26.4 orjson-3.10.15 propcache-0.3.0 requests-toolbelt-1.0.0 tenacity-9.0.0 yarl-1.18.3 zstandard-0.23.0\n",
      "Looking in indexes: http://mirrors.aliyun.com/pypi/simple/\n",
      "Requirement already satisfied: langchain-core in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (0.3.39)\n",
      "Collecting langgraph\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/df/3d/d6034923e0a03aa406b067e4777b611ffc65a8906a2efa214e845b3f84d4/langgraph-0.2.74-py3-none-any.whl (151 kB)\n",
      "Requirement already satisfied: langsmith<0.4,>=0.1.125 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core) (0.3.11)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core) (9.0.0)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core) (1.33)\n",
      "Requirement already satisfied: PyYAML>=5.3 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core) (6.0.2)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core) (4.12.2)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.5.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core) (2.10.6)\n",
      "Collecting langgraph-checkpoint<3.0.0,>=2.0.10 (from langgraph)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/7c/63/03bc3dd304ead45b53313cab8727329e1d139a2d220f2d030c72242c860e/langgraph_checkpoint-2.0.16-py3-none-any.whl (38 kB)\n",
      "Collecting langgraph-sdk<0.2.0,>=0.1.42 (from langgraph)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/fc/97/3492a07b454cc74bf49938e83f0a95c608a8bc5c3dda338091d3c66e3ec5/langgraph_sdk-0.1.53-py3-none-any.whl (45 kB)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core) (3.0.0)\n",
      "Collecting msgpack<2.0.0,>=1.1.0 (from langgraph-checkpoint<3.0.0,>=2.0.10->langgraph)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/aa/c4/5a582fc9a87991a3e6f6800e9bb2f3c82972912235eb9539954f3e9997c7/msgpack-1.1.0-cp311-cp311-win_amd64.whl (74 kB)\n",
      "Requirement already satisfied: httpx>=0.25.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph-sdk<0.2.0,>=0.1.42->langgraph) (0.28.1)\n",
      "Requirement already satisfied: orjson>=3.10.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph-sdk<0.2.0,>=0.1.42->langgraph) (3.10.15)\n",
      "Requirement already satisfied: requests<3,>=2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core) (2.32.3)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core) (0.23.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.5.2->langchain-core) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.27.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.5.2->langchain-core) (2.27.2)\n",
      "Requirement already satisfied: anyio in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (4.8.0)\n",
      "Requirement already satisfied: certifi in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (2025.1.31)\n",
      "Requirement already satisfied: httpcore==1.* in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (1.0.7)\n",
      "Requirement already satisfied: idna in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (3.10)\n",
      "Requirement already satisfied: h11<0.15,>=0.13 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpcore==1.*->httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (0.14.0)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.125->langchain-core) (3.4.1)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.125->langchain-core) (2.3.0)\n",
      "Requirement already satisfied: sniffio>=1.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from anyio->httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (1.3.1)\n",
      "Installing collected packages: msgpack, langgraph-sdk, langgraph-checkpoint, langgraph\n",
      "Successfully installed langgraph-0.2.74 langgraph-checkpoint-2.0.16 langgraph-sdk-0.1.53 msgpack-1.1.0\n",
      "Looking in indexes: http://mirrors.aliyun.com/pypi/simple/\n",
      "Collecting langchain-openai\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/36/0e/816c5293eda67600d374bb8484a9adab873c9096489f6f91634581919f35/langchain_openai-0.3.7-py3-none-any.whl (55 kB)\n",
      "Requirement already satisfied: langchain-core<1.0.0,>=0.3.39 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-openai) (0.3.39)\n",
      "Requirement already satisfied: openai<2.0.0,>=1.58.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-openai) (1.61.1)\n",
      "Collecting tiktoken<1,>=0.7 (from langchain-openai)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl (893 kB)\n",
      "     ---------------------------------------- 0.0/893.9 kB ? eta -:--:--\n",
      "     ------------------------------------- 893.9/893.9 kB 10.2 MB/s eta 0:00:00\n",
      "Requirement already satisfied: langsmith<0.4,>=0.1.125 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.39->langchain-openai) (0.3.11)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.39->langchain-openai) (9.0.0)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.39->langchain-openai) (1.33)\n",
      "Requirement already satisfied: PyYAML>=5.3 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.39->langchain-openai) (6.0.2)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.39->langchain-openai) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.39->langchain-openai) (4.12.2)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.5.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core<1.0.0,>=0.3.39->langchain-openai) (2.10.6)\n",
      "Requirement already satisfied: anyio<5,>=3.5.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from openai<2.0.0,>=1.58.1->langchain-openai) (4.8.0)\n",
      "Requirement already satisfied: distro<2,>=1.7.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from openai<2.0.0,>=1.58.1->langchain-openai) (1.9.0)\n",
      "Requirement already satisfied: httpx<1,>=0.23.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from openai<2.0.0,>=1.58.1->langchain-openai) (0.28.1)\n",
      "Requirement already satisfied: jiter<1,>=0.4.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from openai<2.0.0,>=1.58.1->langchain-openai) (0.8.2)\n",
      "Requirement already satisfied: sniffio in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from openai<2.0.0,>=1.58.1->langchain-openai) (1.3.1)\n",
      "Requirement already satisfied: tqdm>4 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from openai<2.0.0,>=1.58.1->langchain-openai) (4.67.1)\n",
      "Collecting regex>=2022.1.18 (from tiktoken<1,>=0.7->langchain-openai)\n",
      "  Downloading http://mirrors.aliyun.com/pypi/packages/80/32/763a6cc01d21fb3819227a1cc3f60fd251c13c37c27a73b8ff4315433a8e/regex-2024.11.6-cp311-cp311-win_amd64.whl (274 kB)\n",
      "Requirement already satisfied: requests>=2.26.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from tiktoken<1,>=0.7->langchain-openai) (2.32.3)\n",
      "Requirement already satisfied: idna>=2.8 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from anyio<5,>=3.5.0->openai<2.0.0,>=1.58.1->langchain-openai) (3.10)\n",
      "Requirement already satisfied: certifi in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.58.1->langchain-openai) (2025.1.31)\n",
      "Requirement already satisfied: httpcore==1.* in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.58.1->langchain-openai) (1.0.7)\n",
      "Requirement already satisfied: h11<0.15,>=0.13 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai<2.0.0,>=1.58.1->langchain-openai) (0.14.0)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core<1.0.0,>=0.3.39->langchain-openai) (3.0.0)\n",
      "Requirement already satisfied: orjson<4.0.0,>=3.9.14 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core<1.0.0,>=0.3.39->langchain-openai) (3.10.15)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core<1.0.0,>=0.3.39->langchain-openai) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core<1.0.0,>=0.3.39->langchain-openai) (0.23.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.5.2->langchain-core<1.0.0,>=0.3.39->langchain-openai) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.27.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.5.2->langchain-core<1.0.0,>=0.3.39->langchain-openai) (2.27.2)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests>=2.26.0->tiktoken<1,>=0.7->langchain-openai) (3.4.1)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests>=2.26.0->tiktoken<1,>=0.7->langchain-openai) (2.3.0)\n",
      "Requirement already satisfied: colorama in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from tqdm>4->openai<2.0.0,>=1.58.1->langchain-openai) (0.4.6)\n",
      "Installing collected packages: regex, tiktoken, langchain-openai\n",
      "Successfully installed langchain-openai-0.3.7 regex-2024.11.6 tiktoken-0.9.0\n"
     ]
    }
   ],
   "source": [
    "# Use %pip (not !pip) so packages install into this kernel's environment.\n",
    "# Versions pinned to match the recorded install output above for reproducibility.\n",
    "%pip install langchain==0.3.19\n",
    "%pip install langchain-core==0.3.39 langgraph==0.2.74\n",
    "\n",
    "%pip install langchain-openai==0.3.7"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='你好! 我是阿里云开发的一款超大规模语言模型，我叫通义千问。', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 21, 'prompt_tokens': 14, 'total_tokens': 35, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'qwen-turbo-1101', 'system_fingerprint': None, 'finish_reason': 'stop', 'logprobs': None}, id='run-6b983d4d-3e92-44b1-8c40-291b8b094b55-0', usage_metadata={'input_tokens': 14, 'output_tokens': 21, 'total_tokens': 35, 'input_token_details': {}, 'output_token_details': {}})"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import os\n",
    "\n",
    "from langchain_openai import ChatOpenAI\n",
    "from langchain_core.messages import HumanMessage\n",
    "\n",
    "base_url = \"https://dashscope.aliyuncs.com/compatible-mode/v1\"\n",
    "# Security: never hardcode API keys in a committed notebook -- the original key\n",
    "# was leaked here and must be rotated. Read the key from the environment instead.\n",
    "api_key = os.environ[\"DASHSCOPE_API_KEY\"]\n",
    "model_name = \"qwen-turbo-2024-11-01\"\n",
    "\n",
    "human_message = HumanMessage(content='你好, 你是谁?')\n",
    "\n",
    "model = ChatOpenAI(temperature=0, model_name=model_name, api_key=api_key, base_url=base_url)\n",
    "\n",
    "# Bare last expression -> rich display of the AIMessage (no print needed).\n",
    "model.invoke([human_message])\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "当然可以。以下是一个详细的大语言模型提示词，用于帮助你润色一本短片小说，包括纠正标点符号和修改错别字：\n",
      "\n",
      "---\n",
      "\n",
      "**提示词：**\n",
      "\n",
      "\"你好，我有一本短片小说需要润色。请仔细检查并修正以下文本中的标点符号错误、错别字和其他语法问题。请尽量保持原文的风格和语气，仅在必要时进行适当的修改以提高整体流畅性和可读性。以下是需要润色的小说内容：\"\n",
      "\n",
      "[在这里粘贴你的短片小说内容]\n",
      "\n",
      "\"请在回复中提供润色后的版本，并尽可能地指出所有被修改的部分。谢谢！\"\n",
      "\n",
      "---\n",
      "\n",
      "这样设置提示词可以帮助大语言模型更好地理解你的需求，并提供高质量的润色服务。希望这对你有帮助！\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "\n",
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "from langchain_openai import ChatOpenAI\n",
    "\n",
    "base_url = \"https://dashscope.aliyuncs.com/compatible-mode/v1\"\n",
    "# Security: never hardcode API keys in a committed notebook -- read from the environment.\n",
    "api_key = os.environ[\"DASHSCOPE_API_KEY\"]\n",
    "model_name = \"qwen-turbo-2024-11-01\"\n",
    "\n",
    "model = ChatOpenAI(temperature=0, model_name=model_name, api_key=api_key, base_url=base_url)\n",
    "\n",
    "prompt_template = ChatPromptTemplate(\n",
    "    [\n",
    "        (\"system\", \"你是一个AI助手\"),\n",
    "        (\"user\", \"你好, 帮我写一个关于'{topic}'的大语言模型提示词\")\n",
    "    ]\n",
    ")\n",
    "\n",
    "# LCEL: pipe the prompt template into the model to build a runnable chain.\n",
    "chain = prompt_template | model\n",
    "\n",
    "response = chain.invoke({\"topic\": \"如何使用大语言模型来给一本短片小说润色, 包括标点符号纠正, 错别字修改等\"})\n",
    "\n",
    "print(response.content)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "当然，以下是一个关于“小说洗稿”的大语言模型提示词示例。你可以根据具体需求进行调整：\n",
      "\n",
      "---\n",
      "\n",
      "**提示词：**\n",
      "\n",
      "你好，我需要帮助进行小说洗稿的工作。洗稿是指通过对原文本进行修改和重述，使其在保持原意的基础上具有不同的表达方式，从而避免版权问题或用于创作新的作品。\n",
      "\n",
      "请根据以下输入的小说片段，进行洗稿处理，生成一段与原文意思相近但表述不同的文本。注意保留主要情节和人物关系，但可以适当改变描述方式、词汇选择以及句子结构。\n",
      "\n",
      "**输入文本示例：**\n",
      "```\n",
      "夜幕降临，小镇的灯火逐渐亮起。李明独自一人漫步在街头，心中充满了忧郁。他刚刚得知自己被公司裁员的消息，未来一片迷茫。\n",
      "```\n",
      "\n",
      "**期望输出：**\n",
      "```\n",
      "随着夜色渐浓，小镇上的灯光逐一点亮。李明独自走在路上，内心充满了愁绪。他刚从雇主那里收到了被解雇的通知，对未来感到一片茫然。\n",
      "```\n",
      "\n",
      "---\n",
      "\n",
      "希望这个提示词对你有帮助！如果你有任何特定的要求或需要进一步调整，请告诉我。\n"
     ]
    }
   ],
   "source": [
    "# Reuse the chain built in the previous cell with a different topic.\n",
    "topic_payload = {\"topic\": \"小说洗稿\"}\n",
    "response = chain.invoke(topic_payload)\n",
    "print(response.content)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "目前的俾路支解放军成立于2000年，但其源头可以追溯到1964年冷战期间，当时苏联克格勃利用当地左翼学生运动成立了俾路支民族主义组织。21世纪初，巴基斯坦指控俾路支解放军受到印度的操控，并指出印度在阿富汗的坎大哈与贾拉拉巴德等地的领事馆为其提供军事、经济资助和武装训练，目的是为了制造巴基斯坦的动荡。一些观察家也认为印度暗中支持俾路支解放军，但俾路支解放军的领导人否认其组织与印度有任何关联。\n",
      "\n",
      "一则维基解密泄漏的电报显示，巴基斯坦议员们认为印度、阿拉伯联合酋长国与俄罗斯政府均暗中援助和支持俾路支解放军。另一则维基解密泄露的文件显示，巴基斯坦参议院内政部长Rehman Malik发表了有争议的言论，称印度、阿富汗和俄罗斯支持俾路支解放军（BLA），煽动该省的叛乱。2023年12月27日，据巴基斯坦《观察家报》报道，有俾路支解放军指挥官投降，称当地的抗争背后有印度的参与。\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "\n",
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "from langchain_openai import ChatOpenAI\n",
    "\n",
    "base_url = \"https://dashscope.aliyuncs.com/compatible-mode/v1\"\n",
    "# Security: never hardcode API keys in a committed notebook -- read from the environment.\n",
    "api_key = os.environ[\"DASHSCOPE_API_KEY\"]\n",
    "model_name = \"qwen-turbo-2024-11-01\"\n",
    "\n",
    "model = ChatOpenAI(temperature=0, model_name=model_name, api_key=api_key, base_url=base_url)\n",
    "\n",
    "user_prompt_text = \"\"\"\n",
    "**提示词：**\n",
    "\n",
    "\"请仔细阅读并润色下面这篇短片小说。润色时，请注意以下几个方面：\n",
    "1. **标点符号**：确保所有句子都有正确的标点符号，如句号、逗号、分号、冒号、引号和问号等。\n",
    "2. **错别字**：检查并更正任何拼写错误或错别字。\n",
    "3. **语法修正**：确保所有句子都符合语法规则，包括主谓一致、时态一致、冠词使用等。\n",
    "4. **流畅性**：使语言更加流畅，避免重复词语和表达方式，增强叙述的连贯性和可读性。\n",
    "\n",
    "请根据上述要求对下面的小说进行润色，并保持原意不变。\n",
    "\n",
    "`{text}`\n",
    "\n",
    "谢谢！\"\n",
    "\"\"\"\n",
    "\n",
    "prompt_template = ChatPromptTemplate(\n",
    "    [\n",
    "        (\"system\", \"你是一个AI助手, 你可以帮我完成NLP领域的任务\"),\n",
    "        (\"user\", user_prompt_text)\n",
    "    ]\n",
    ")\n",
    "\n",
    "chain = prompt_template | model\n",
    "\n",
    "# NOTE: the sample text below intentionally contains typos and garbled characters\n",
    "# (e.g. 追述, 克b格勃) -- they are the input the model is asked to correct.\n",
    "novel_text = \"\"\"\n",
    "\n",
    "\n",
    "目前的俾路支解放军成立于2000年，但其源头可以追述到1964年冷战期间苏联克b格勃利用当地左翼学生运动成立的俾路支民族主义组织[7]。21世纪初，巴基斯坦指控俾路支解放军为印度所操控，且印度于阿富汗坎达哈与贾拉拉巴德等地的领事馆为其提供军事、经济资助和武装训练，目的为造成巴基斯坦动荡[26][27]。有一些观察家也认为印度暗中支持俾路支解放军[28]，但俾路支解放军的领导人否认其组织与印度有任何关联[29]。\n",
    "\n",
    "一则维基解密泄漏的电报显示巴基斯坦议员认为印度、阿拉伯联合酋长国与俄罗斯政府均暗中援助、支持俾路支解放军[30]。另一则维基解密泄露文件显示巴基斯坦参议院内政部长 Rehman Malik 发表了有争议的言论，称印度、阿富汗和俄罗斯支持俾路支解放军 (BLA) 煽动该省的叛乱[31]。2023年12月27日，据巴基斯坦《观察家报》报道有俾路支解放军指挥官投降称当地抗争有印度幕后参与[32]。\n",
    "\n",
    "\"\"\"\n",
    "\n",
    "# Fixed: original read 'chain.i女oke' (a garbled character inside the method\n",
    "# name), which would raise AttributeError; the method is Runnable.invoke.\n",
    "response = chain.invoke({\"text\": novel_text})\n",
    "\n",
    "print(response.content)"
   ]
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: http://mirrors.aliyun.com/pypi/simple/\n",
      "Requirement already satisfied: langgraph in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (0.2.74)\n",
      "Requirement already satisfied: langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph) (0.3.39)\n",
      "Requirement already satisfied: langgraph-checkpoint<3.0.0,>=2.0.10 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph) (2.0.16)\n",
      "Requirement already satisfied: langgraph-sdk<0.2.0,>=0.1.42 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph) (0.1.53)\n",
      "Requirement already satisfied: langsmith<0.4,>=0.1.125 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (0.3.11)\n",
      "Requirement already satisfied: tenacity!=8.4.0,<10.0.0,>=8.1.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (9.0.0)\n",
      "Requirement already satisfied: jsonpatch<2.0,>=1.33 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (1.33)\n",
      "Requirement already satisfied: PyYAML>=5.3 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (6.0.2)\n",
      "Requirement already satisfied: packaging<25,>=23.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (24.2)\n",
      "Requirement already satisfied: typing-extensions>=4.7 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (4.12.2)\n",
      "Requirement already satisfied: pydantic<3.0.0,>=2.5.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (2.10.6)\n",
      "Requirement already satisfied: msgpack<2.0.0,>=1.1.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph-checkpoint<3.0.0,>=2.0.10->langgraph) (1.1.0)\n",
      "Requirement already satisfied: httpx>=0.25.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph-sdk<0.2.0,>=0.1.42->langgraph) (0.28.1)\n",
      "Requirement already satisfied: orjson>=3.10.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langgraph-sdk<0.2.0,>=0.1.42->langgraph) (3.10.15)\n",
      "Requirement already satisfied: anyio in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (4.8.0)\n",
      "Requirement already satisfied: certifi in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (2025.1.31)\n",
      "Requirement already satisfied: httpcore==1.* in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (1.0.7)\n",
      "Requirement already satisfied: idna in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (3.10)\n",
      "Requirement already satisfied: h11<0.15,>=0.13 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from httpcore==1.*->httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (0.14.0)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from jsonpatch<2.0,>=1.33->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (3.0.0)\n",
      "Requirement already satisfied: requests<3,>=2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (2.32.3)\n",
      "Requirement already satisfied: requests-toolbelt<2.0.0,>=1.0.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (1.0.0)\n",
      "Requirement already satisfied: zstandard<0.24.0,>=0.23.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from langsmith<0.4,>=0.1.125->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (0.23.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.5.2->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.27.2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from pydantic<3.0.0,>=2.5.2->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (2.27.2)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.125->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (3.4.1)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from requests<3,>=2->langsmith<0.4,>=0.1.125->langchain-core!=0.3.0,!=0.3.1,!=0.3.10,!=0.3.11,!=0.3.12,!=0.3.13,!=0.3.14,!=0.3.15,!=0.3.16,!=0.3.17,!=0.3.18,!=0.3.19,!=0.3.2,!=0.3.20,!=0.3.21,!=0.3.22,!=0.3.3,!=0.3.4,!=0.3.5,!=0.3.6,!=0.3.7,!=0.3.8,!=0.3.9,<0.4.0,>=0.2.43->langgraph) (2.3.0)\n",
      "Requirement already satisfied: sniffio>=1.1 in c:\\users\\rf.yin\\.conda\\envs\\pydantic-ai\\lib\\site-packages (from anyio->httpx>=0.25.2->langgraph-sdk<0.2.0,>=0.1.42->langgraph) (1.3.1)\n"
     ]
    }
   ],
   "source": [
    "! pip install langgraph"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: http://mirrors.aliyun.com/pypi/simple/\n",
      "Requirement already satisfied: pydantic in c:\\users\\rf.yin\\.conda\\envs\\langchain_0_3\\lib\\site-packages (2.10.0)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in c:\\users\\rf.yin\\.conda\\envs\\langchain_0_3\\lib\\site-packages (from pydantic) (0.7.0)\n",
      "Requirement already satisfied: pydantic-core==2.27.0 in c:\\users\\rf.yin\\.conda\\envs\\langchain_0_3\\lib\\site-packages (from pydantic) (2.27.0)\n",
      "Requirement already satisfied: typing-extensions>=4.12.2 in c:\\users\\rf.yin\\.conda\\envs\\langchain_0_3\\lib\\site-packages (from pydantic) (4.12.2)\n"
     ]
    }
   ],
   "source": [
    "! pip install pydantic"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "ename": "ValidationError",
     "evalue": "1 validation error for NovelDataListModel\nnovel_list\n  Field required [type=missing, input_value={'orginal_text': '故事...段的基本国情。'}, input_type=dict]\n    For further information visit https://errors.pydantic.dev/2.10/v/missing",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mValidationError\u001b[0m                           Traceback (most recent call last)",
      "Cell \u001b[1;32mIn[4], line 207\u001b[0m\n\u001b[0;32m    185\u001b[0m chain \u001b[38;5;241m=\u001b[39m prompt_template \u001b[38;5;241m|\u001b[39m structured_llm\n\u001b[0;32m    187\u001b[0m novel_text \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\"\"\u001b[39m\n\u001b[0;32m    188\u001b[0m \u001b[38;5;124m故事开始的那天，我照例是上着班，打扫完一片狼藉的宠物店，走出店门口，在隔壁便利店买了一包五块钱的软白沙，疲惫的靠着墙点了一支烟。\u001b[39m\n\u001b[0;32m    189\u001b[0m \u001b[38;5;124m店门口的台阶上，一字排开坐了一行人，有老有少，有男有女。有个白嫩的小萝莉，全身汗津津的，bra在校服下若隐若现。青春，真可爱青春。\u001b[39m\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    203\u001b[0m \u001b[38;5;124m在这家绝望的宠物店，做着绝望的工作，领着着绝望的工资，老板心眼太多，手下心眼太少；加薪是个童话，加班才是现阶段的基本国情。\u001b[39m\n\u001b[0;32m    204\u001b[0m \u001b[38;5;124m\"\"\"\u001b[39m\n\u001b[1;32m--> 207\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[43mchain\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mnovel_text\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mnovel_text\u001b[49m\u001b[43m}\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    208\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;28mtype\u001b[39m(response))\n\u001b[0;32m    209\u001b[0m \u001b[38;5;28mprint\u001b[39m(response\u001b[38;5;241m.\u001b[39mmodel_dump_json(indent\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m4\u001b[39m))\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\langchain_core\\runnables\\base.py:3024\u001b[0m, in \u001b[0;36mRunnableSequence.invoke\u001b[1;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[0;32m   3022\u001b[0m             \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m context\u001b[38;5;241m.\u001b[39mrun(step\u001b[38;5;241m.\u001b[39minvoke, \u001b[38;5;28minput\u001b[39m, config, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m   3023\u001b[0m         \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 3024\u001b[0m             \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[43mcontext\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstep\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minvoke\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m   3025\u001b[0m \u001b[38;5;66;03m# finish the root run\u001b[39;00m\n\u001b[0;32m   3026\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\langchain_core\\output_parsers\\base.py:193\u001b[0m, in \u001b[0;36mBaseOutputParser.invoke\u001b[1;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[0;32m    186\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21minvoke\u001b[39m(\n\u001b[0;32m    187\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    188\u001b[0m     \u001b[38;5;28minput\u001b[39m: Union[\u001b[38;5;28mstr\u001b[39m, BaseMessage],\n\u001b[0;32m    189\u001b[0m     config: Optional[RunnableConfig] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m    190\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    191\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m T:\n\u001b[0;32m    192\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28minput\u001b[39m, BaseMessage):\n\u001b[1;32m--> 193\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_with_config\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    194\u001b[0m \u001b[43m            \u001b[49m\u001b[38;5;28;43;01mlambda\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43minner_input\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mparse_result\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    195\u001b[0m \u001b[43m                \u001b[49m\u001b[43m[\u001b[49m\u001b[43mChatGeneration\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmessage\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minner_input\u001b[49m\u001b[43m)\u001b[49m\u001b[43m]\u001b[49m\n\u001b[0;32m    196\u001b[0m \u001b[43m            \u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    197\u001b[0m \u001b[43m            
\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m    198\u001b[0m \u001b[43m            \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    199\u001b[0m \u001b[43m            \u001b[49m\u001b[43mrun_type\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mparser\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m    200\u001b[0m \u001b[43m        \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    201\u001b[0m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m    202\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_with_config(\n\u001b[0;32m    203\u001b[0m             \u001b[38;5;28;01mlambda\u001b[39;00m inner_input: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mparse_result([Generation(text\u001b[38;5;241m=\u001b[39minner_input)]),\n\u001b[0;32m    204\u001b[0m             \u001b[38;5;28minput\u001b[39m,\n\u001b[0;32m    205\u001b[0m             config,\n\u001b[0;32m    206\u001b[0m             run_type\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mparser\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m    207\u001b[0m         )\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\langchain_core\\runnables\\base.py:1927\u001b[0m, in \u001b[0;36mRunnable._call_with_config\u001b[1;34m(self, func, input, config, run_type, serialized, **kwargs)\u001b[0m\n\u001b[0;32m   1923\u001b[0m     context \u001b[38;5;241m=\u001b[39m copy_context()\n\u001b[0;32m   1924\u001b[0m     context\u001b[38;5;241m.\u001b[39mrun(_set_config_context, child_config)\n\u001b[0;32m   1925\u001b[0m     output \u001b[38;5;241m=\u001b[39m cast(\n\u001b[0;32m   1926\u001b[0m         Output,\n\u001b[1;32m-> 1927\u001b[0m         \u001b[43mcontext\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m   1928\u001b[0m \u001b[43m            \u001b[49m\u001b[43mcall_func_with_variable_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m  \u001b[49m\u001b[38;5;66;43;03m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[0;32m   1929\u001b[0m \u001b[43m            \u001b[49m\u001b[43mfunc\u001b[49m\u001b[43m,\u001b[49m\u001b[43m  \u001b[49m\u001b[38;5;66;43;03m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[0;32m   1930\u001b[0m \u001b[43m            \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m  \u001b[49m\u001b[38;5;66;43;03m# type: ignore[arg-type]\u001b[39;49;00m\n\u001b[0;32m   1931\u001b[0m \u001b[43m            \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m   1932\u001b[0m \u001b[43m            \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m   1933\u001b[0m \u001b[43m            \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m   1934\u001b[0m \u001b[43m        \u001b[49m\u001b[43m)\u001b[49m,\n\u001b[0;32m   1935\u001b[0m     )\n\u001b[0;32m   1936\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m 
e:\n\u001b[0;32m   1937\u001b[0m     run_manager\u001b[38;5;241m.\u001b[39mon_chain_error(e)\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\langchain_core\\runnables\\config.py:396\u001b[0m, in \u001b[0;36mcall_func_with_variable_args\u001b[1;34m(func, input, config, run_manager, **kwargs)\u001b[0m\n\u001b[0;32m    394\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m run_manager \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m accepts_run_manager(func):\n\u001b[0;32m    395\u001b[0m     kwargs[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrun_manager\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m run_manager\n\u001b[1;32m--> 396\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\langchain_core\\output_parsers\\base.py:194\u001b[0m, in \u001b[0;36mBaseOutputParser.invoke.<locals>.<lambda>\u001b[1;34m(inner_input)\u001b[0m\n\u001b[0;32m    186\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21minvoke\u001b[39m(\n\u001b[0;32m    187\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    188\u001b[0m     \u001b[38;5;28minput\u001b[39m: Union[\u001b[38;5;28mstr\u001b[39m, BaseMessage],\n\u001b[0;32m    189\u001b[0m     config: Optional[RunnableConfig] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m    190\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    191\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m T:\n\u001b[0;32m    192\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28minput\u001b[39m, BaseMessage):\n\u001b[0;32m    193\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_with_config(\n\u001b[1;32m--> 194\u001b[0m             \u001b[38;5;28;01mlambda\u001b[39;00m inner_input: \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mparse_result\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    195\u001b[0m \u001b[43m                \u001b[49m\u001b[43m[\u001b[49m\u001b[43mChatGeneration\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmessage\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minner_input\u001b[49m\u001b[43m)\u001b[49m\u001b[43m]\u001b[49m\n\u001b[0;32m    196\u001b[0m \u001b[43m            \u001b[49m\u001b[43m)\u001b[49m,\n\u001b[0;32m    197\u001b[0m             \u001b[38;5;28minput\u001b[39m,\n\u001b[0;32m    198\u001b[0m             config,\n\u001b[0;32m    199\u001b[0m             
run_type\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mparser\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m    200\u001b[0m         )\n\u001b[0;32m    201\u001b[0m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m    202\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_with_config(\n\u001b[0;32m    203\u001b[0m             \u001b[38;5;28;01mlambda\u001b[39;00m inner_input: \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mparse_result([Generation(text\u001b[38;5;241m=\u001b[39minner_input)]),\n\u001b[0;32m    204\u001b[0m             \u001b[38;5;28minput\u001b[39m,\n\u001b[0;32m    205\u001b[0m             config,\n\u001b[0;32m    206\u001b[0m             run_type\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mparser\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m    207\u001b[0m         )\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\langchain_core\\output_parsers\\openai_tools.py:298\u001b[0m, in \u001b[0;36mPydanticToolsParser.parse_result\u001b[1;34m(self, result, partial)\u001b[0m\n\u001b[0;32m    296\u001b[0m             \u001b[38;5;28;01mcontinue\u001b[39;00m\n\u001b[0;32m    297\u001b[0m         \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 298\u001b[0m             \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m    299\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfirst_tool_only:\n\u001b[0;32m    300\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m pydantic_objects[\u001b[38;5;241m0\u001b[39m] \u001b[38;5;28;01mif\u001b[39;00m pydantic_objects \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\langchain_core\\output_parsers\\openai_tools.py:293\u001b[0m, in \u001b[0;36mPydanticToolsParser.parse_result\u001b[1;34m(self, result, partial)\u001b[0m\n\u001b[0;32m    288\u001b[0m         msg \u001b[38;5;241m=\u001b[39m (\n\u001b[0;32m    289\u001b[0m             \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mTool arguments must be specified as a dict, received: \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m    290\u001b[0m             \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mres[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124margs\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m    291\u001b[0m         )\n\u001b[0;32m    292\u001b[0m         \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(msg)\n\u001b[1;32m--> 293\u001b[0m     pydantic_objects\u001b[38;5;241m.\u001b[39mappend(\u001b[43mname_dict\u001b[49m\u001b[43m[\u001b[49m\u001b[43mres\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtype\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m]\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mres\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43margs\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[0;32m    294\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (ValidationError, \u001b[38;5;167;01mValueError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m    295\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m partial:\n",
      "File \u001b[1;32mc:\\Users\\rf.yin\\.conda\\envs\\langchain_0_3\\Lib\\site-packages\\pydantic\\main.py:214\u001b[0m, in \u001b[0;36mBaseModel.__init__\u001b[1;34m(self, **data)\u001b[0m\n\u001b[0;32m    212\u001b[0m \u001b[38;5;66;03m# `__tracebackhide__` tells pytest and some other tools to omit this function from tracebacks\u001b[39;00m\n\u001b[0;32m    213\u001b[0m __tracebackhide__ \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[1;32m--> 214\u001b[0m validated_self \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m__pydantic_validator__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mvalidate_python\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mself_instance\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m    215\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m validated_self:\n\u001b[0;32m    216\u001b[0m     warnings\u001b[38;5;241m.\u001b[39mwarn(\n\u001b[0;32m    217\u001b[0m         \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mA custom validator is returning a value other than `self`.\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m'\u001b[39m\n\u001b[0;32m    218\u001b[0m         \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mReturning anything other than `self` from a top level model validator isn\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt supported when validating via `__init__`.\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m    219\u001b[0m         \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mSee the `model_validator` docs (https://docs.pydantic.dev/latest/concepts/validators/#model-validators) for more details.\u001b[39m\u001b[38;5;124m'\u001b[39m,\n\u001b[0;32m    220\u001b[0m         
stacklevel\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m2\u001b[39m,\n\u001b[0;32m    221\u001b[0m     )\n",
      "\u001b[1;31mValidationError\u001b[0m: 1 validation error for NovelDataListModel\nnovel_list\n  Field required [type=missing, input_value={'orginal_text': '故事...段的基本国情。'}, input_type=dict]\n    For further information visit https://errors.pydantic.dev/2.10/v/missing"
     ]
    }
   ],
   "source": [
    "from langchain_core.prompts import ChatPromptTemplate\n",
    "from langchain_openai import ChatOpenAI\n",
    "from langchain_core.messages import HumanMessage\n",
    "from typing import Optional, List\n",
    "from pydantic import BaseModel, Field\n",
    "\n",
    "\n",
    "# 使用pydantic来定义一个数据模型\n",
    "class NovelData(BaseModel):\n",
    "    # 定义一个名为 NovelData 的类，继承自 BaseModel，用于表示小说数据模型\n",
    "    text: str = Field(description=\"拆分后的文本\")\n",
    "    # 定义一个名为 text 的属性，类型为字符串，用于存储拆分后的文本内容\n",
    "    # 使用 Field 函数添加描述信息，说明该字段用于存储拆分后的文本\n",
    "    type: str = Field(description=\"该段文本的类型， 可能值为： 旁白， 对话\")\n",
    "    # 定义一个名为 type 的属性，类型为字符串，用于存储该段文本的类型\n",
    "    # 使用 Field 函数添加描述信息，说明该字段可能的取值为“旁白”或“对话”\n",
    "    character: Optional[str] = Field(description=\"该段文本的说话角色, 请一定要结合上下文推测说话的角色名称。如果是旁白或引申说明，则该字段为空；如果是对话或内心独白，则该字段为说话角色的名称； \")\n",
    "    # 定义一个名为 character 的属性，类型为可选字符串，用于存储该段文本的说话角色\n",
    "    # 使用 Field 函数添加描述信息，说明该字段用于存储说话角色的名称，需要结合上下文推测\n",
    "    # 如果是旁白或引申说明，则该字段为空；如果是对话或内心独白，则该字段为说话角色的名称\n",
    "    emotion: Optional[str] = Field(description=\"该段文本的说话角色的情感， 可能的取值为： '快乐', '悲伤', '愤怒', '恐惧', '惊讶', '焦虑', '羞愧', '自豪', '嫉妒', '爱', '失望', '困惑', '希望', '绝望', '同情', '厌恶', '感激', '无聊', '兴奋', '孤独', '内疚', '骄傲', '谦卑', '渴望', '满足', '好奇', '紧张', '宽慰', '疲惫', '振奋'\")\n",
    "\n",
    "class NovelDataListModel(BaseModel):\n",
    "    orginal_text: str = Field(description=\"小说的原始段落文本, 一个段落文本, 可能会被拆分成多个句子, 属于不同的'角色对话'或者'旁白'\")\n",
    "    novel_list: List[NovelData] = Field(description=\"句子列表\")\n",
    "\n",
    "base_url = \"https://dashscope.aliyuncs.com/compatible-mode/v1\"\n",
    "api_key = \"sk-70a3c6b01ceb4b969063c9ca958a4f2e\"\n",
    "model_name = \"qwen-turbo-2024-11-01\"\n",
    "\n",
    "human_message = HumanMessage(content = '你好, 你是谁?')\n",
    "\n",
    "model = ChatOpenAI(temperature=0, model_name=model_name, api_key=api_key, base_url=base_url)\n",
    "\n",
    "structured_llm = model.with_structured_output(NovelDataListModel)\n",
    "\n",
    "user_prompt_text = \"\"\"\n",
    "请将以下文本转换成适合有声书的脚本格式。在处理文本时。\n",
    "\n",
    "参考以下示例：\n",
    "\n",
    "## 示例1\n",
    "原文: “张帆，干嘛呢？是不是又偷懒？”一个粗里粗气的声音将我从沉思中惊醒。\n",
    "输出: \n",
    "{{\n",
    "    original_text: \"张帆，干嘛呢？是不是又偷懒？一个粗里粗气的声音将我从沉思中惊醒。\",\n",
    "    novel_list: [\n",
    "    {{\n",
    "        \"text\": \"张帆，干嘛呢？是不是又偷懒？\",\n",
    "        \"type\": \"对话\",\n",
    "        \"character\": \"荷花\",\n",
    "        \"emotion\": \"愤怒\"\n",
    "    }},\n",
    "    {{\n",
    "        \"text\": \"一个粗里粗气的声音将我从沉思中惊醒。\",\n",
    "        \"type\": \"旁白\",\n",
    "        \"character\": null,\n",
    "        \"emotion\": \"厌恶\"\n",
    "    }}\n",
    "]\n",
    "}}\n",
    "\n",
    "\n",
    "\n",
    "## 示例2\n",
    "原文: “这不可能！”李明大声喊道，他的脸因为愤怒而涨得通红。“你一定在骗我。”\n",
    "输出: \n",
    "{{\n",
    "    original_text: \"这不可能！李明大声喊道，他的脸因为愤怒而涨得通红。你一定在骗我。\",\n",
    "    novel_list: [\n",
    "    {{\n",
    "        \"text\": \"这不可能！\",\n",
    "        \"type\": \"对话\",\n",
    "        \"character\": \"李明\",\n",
    "        \"emotion\": \"愤怒\"\n",
    "    }},\n",
    "    {{\n",
    "        \"text\": \"李明大声喊道，他的脸因为愤怒而涨得通红。\",\n",
    "        \"type\": \"旁白\",\n",
    "        \"character\": null,\n",
    "        \"emotion\": \"厌恶\"\n",
    "    }},\n",
    "    {{\n",
    "        \"text\": \"你一定在骗我。\",\n",
    "        \"type\": \"对话\",\n",
    "        \"character\": \"李明\",\n",
    "        \"emotion\": \"愤怒\"\n",
    "    }}\n",
    "]\n",
    "}}\n",
    "\n",
    "\n",
    "\n",
    "## 示例3\n",
    "原文: “你确定这样做是对的吗？”王丽问，她的声音充满了不确定。\n",
    "输出: \n",
    "{{\n",
    "    original_text: \"你确定这样做是对的吗？王丽问，她的声音充满了不确定。\",\n",
    "    novel_list: [\n",
    "    {{\n",
    "        \"text\": \"你确定这样做是对的吗？\",\n",
    "        \"type\": \"对话\",\n",
    "        \"character\": \"王丽\",\n",
    "        \"emotion\": \"惊讶\"\n",
    "    }},\n",
    "    {{\n",
    "        \"text\": \"王丽问，她的声音充满了不确定。\",\n",
    "        \"type\": \"旁白\",\n",
    "        \"character\": null,\n",
    "        \"emotion\": \"惊讶\"\n",
    "    }}\n",
    "]\n",
    "}}\n",
    "\n",
    "\n",
    "## 示例4\n",
    "原文: “我们走吧。”他轻轻地说，同时伸出手来拉她。\n",
    "输出: \n",
    "{{\n",
    "    original_text: \"我们走吧。他轻轻地说，同时伸出手来拉她。\",\n",
    "    novel_list: [\n",
    "    {{\n",
    "        \"text\": \"我们走吧。\",\n",
    "        \"type\": \"对话\",\n",
    "        \"character\": \"他\",\n",
    "        \"emotion\": \"振奋\"\n",
    "    }},\n",
    "    {{\n",
    "        \"text\": \"他轻轻地说，同时伸出手来拉她。\",\n",
    "        \"type\": \"旁白\",\n",
    "        \"character\": null,\n",
    "        \"emotion\": \"振奋\"\n",
    "    }}\n",
    "]\n",
    "}}\n",
    "\n",
    "\n",
    "## 示例5\n",
    "原文: 我把烟头丢掉，奴颜媚骨的问：“花姐有什么吩咐。”\n",
    "输出: \n",
    "{{\n",
    "    original_text: \"我把烟头丢掉，奴颜媚骨的问：“花姐有什么吩咐。”\",\n",
    "    novel_list: [\n",
    "    {{\n",
    "        \"text\": \"我把烟头丢掉，奴颜媚骨的问\",\n",
    "        \"type\": \"旁白\",\n",
    "        \"character\": \"我\",\n",
    "        \"emotion\": \"谦卑\"\n",
    "    }},\n",
    "    {{\n",
    "        \"text\": \"花姐有什么吩咐。\",\n",
    "        \"type\": \"对话\",\n",
    "        \"character\": \"我\",\n",
    "        \"emotion\": \"内疚\"\n",
    "    }}\n",
    "]\n",
    "}}\n",
    "\n",
    "\n",
    "下面是小说片段：\n",
    "`{novel_text}`\n",
    "\"\"\"\n",
    "\n",
    "prompt_template = ChatPromptTemplate(\n",
    "    [\n",
    "        (\"system\", \"\"\"\n",
    "你是一个专业的有声书脚本转换助手，你的任务是将给定的小说片段转换成适合录制有声书的脚本。请按照以下规则进行转换：\n",
    "\n",
    "1. **区分对话和旁白**：\n",
    "   - 识别并分离文本中的人物对话（通常位于双引号内）和旁白（描述性文字）。对话部分应被特别标注出来，以便于后续的声音演绎。\n",
    "   \n",
    "2. **角色对话处理**：\n",
    "   - 对于每一句人物对话，请指明说话者的身份（如果已知），并建议适当的情绪、语气或音调变化来表达该句话的情感背景。例如，“愤怒地说”、“轻柔地问”等。\n",
    "   - 如果原文中没有明确指出是谁在说话，请根据上下文合理推测，并在脚本中标注出可能的说话者。\n",
    "\n",
    "3. **旁白处理**：\n",
    "   - 将旁白部分用叙述的方式呈现，确保语言流畅且易于理解。旁白应当用来建立场景、描述动作或解释事件，帮助听众更好地想象故事的发展。\n",
    "   - 注意旁白中的情感色彩，如紧张、轻松、悲伤等，并通过语速、语调的变化传达给听众。\n",
    "\n",
    "4. **特殊说明**：\n",
    "   - 如果文本中有任何特殊的格式化要求（如强调某些词语、使用特定的停顿等），请在脚本中明确指出。\n",
    "   - 对于不常见的词汇或者专有名词，考虑添加发音指导以确保正确朗读。\n",
    "\n",
    "请严格按照上述指导原则执行转换任务，确保最终生成的有声书脚本能准确反映原文的情感和氛围，同时为听众提供愉悦的听觉体验。\n",
    "\n",
    "        \"\"\"),\n",
    "        (\"user\", user_prompt_text)\n",
    "    ]\n",
    ")\n",
    "\n",
    "\n",
    "\n",
    "chain = prompt_template | structured_llm\n",
    "\n",
    "novel_text = \"\"\"\n",
    "故事开始的那天，我照例是上着班，打扫完一片狼藉的宠物店，走出店门口，在隔壁便利店买了一包五块钱的软白沙，疲惫的靠着墙点了一支烟。\n",
    "店门口的台阶上，一字排开坐了一行人，有老有少，有男有女。有个白嫩的小萝莉，全身汗津津的，bra在校服下若隐若现。青春，真可爱青春。\n",
    "我叼着烟看着那个小萝莉，她一边打电话，一边眨巴眨巴眼睛看我，然后看向路边。我又抽了两口烟，一部宝马停在路边，小萝莉走过去，青春，真可爱青春。\n",
    "小萝莉开了宝马车的门上车，开车的是一个戴墨镜的秃顶大叔，大叔抱住了小萝莉，黑黝黝的手伸向了小萝莉。\n",
    "我在心里骂，禽兽。\n",
    "苦逼啊，我悟了，这个纸醉金迷的花花都市，并不是一个农村孩子的天堂。\n",
    "“张帆，干嘛呢？是不是又偷懒？”一个粗里粗气的声音将我从沉思中惊醒。\n",
    "一扭头，店长何花，老板是她干爹，我们叫她花姐，正怒目冷对着我。\n",
    "我把烟头丢掉，奴颜媚骨的问：“花姐有什么吩咐。”\n",
    "人在屋檐下，不得不低头。\n",
    "“我在店里忙得要死，你倒是闲的很，躲在这里偷懒抽烟，没点上进心，难怪你女朋友跟有钱人跑了。”\n",
    "看着她上下开合的两片薄薄殷红嘴唇，我已经在心里把它骂了一百遍。\n",
    "女友的出轨对我打击无疑是巨大的，偏偏每天来上班还要受到店长的好心提醒：这点事都干不好，难怪你女朋友跟人跑了！给狗洗澡都不会洗，难怪你女朋友跟人跑了！拖地都拖不干净，难怪你女朋友跟人跑了。\n",
    "我女朋友跟人跑了，跟拖地干不干净有毛线关系。\n",
    "“有个客户打电话来，要我们上门给它宠物洗澡！手脚利索点！”她把服务单塞给我。\n",
    "在这家绝望的宠物店，做着绝望的工作，领着着绝望的工资，老板心眼太多，手下心眼太少；加薪是个童话，加班才是现阶段的基本国情。\n",
    "\"\"\"\n",
    "\n",
    "\n",
    "response = chain.invoke({\"novel_text\": novel_text})\n",
    "print(type(response))\n",
    "print(response.model_dump_json(indent=4))"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pydantic-ai",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
