{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "b171e9c8-b78a-4b4e-8b96-8515e640c7d5",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
      "tensorflow-intel 2.13.0 requires protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.20.3, but you have protobuf 5.29.4 which is incompatible.\n",
      "tensorflow-intel 2.13.0 requires typing-extensions<4.6.0,>=3.6.6, but you have typing-extensions 4.13.2 which is incompatible.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting unstructured\n",
      "  Downloading unstructured-0.11.8-py3-none-any.whl.metadata (26 kB)\n",
      "Collecting chardet (from unstructured)\n",
      "  Downloading chardet-5.2.0-py3-none-any.whl.metadata (3.4 kB)\n",
      "Collecting filetype (from unstructured)\n",
      "  Downloading filetype-1.2.0-py2.py3-none-any.whl.metadata (6.5 kB)\n",
      "Collecting python-magic (from unstructured)\n",
      "  Downloading python_magic-0.4.27-py2.py3-none-any.whl.metadata (5.8 kB)\n",
      "Collecting lxml (from unstructured)\n",
      "  Downloading lxml-5.3.2-cp38-cp38-win_amd64.whl.metadata (3.4 kB)\n",
      "Collecting nltk (from unstructured)\n",
      "  Downloading nltk-3.9.1-py3-none-any.whl.metadata (2.9 kB)\n",
      "Collecting tabulate (from unstructured)\n",
      "  Downloading tabulate-0.9.0-py3-none-any.whl.metadata (34 kB)\n",
      "Requirement already satisfied: requests in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured) (2.32.3)\n",
      "Requirement already satisfied: beautifulsoup4 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured) (4.12.3)\n",
      "Collecting emoji (from unstructured)\n",
      "  Downloading emoji-2.14.1-py3-none-any.whl.metadata (5.7 kB)\n",
      "Requirement already satisfied: dataclasses-json in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured) (0.6.7)\n",
      "Collecting python-iso639 (from unstructured)\n",
      "  Downloading python_iso639-2024.10.22-py3-none-any.whl.metadata (13 kB)\n",
      "Collecting langdetect (from unstructured)\n",
      "  Downloading langdetect-1.0.9.tar.gz (981 kB)\n",
      "     ---------------------------------------- 0.0/981.5 kB ? eta -:--:--\n",
      "     ---------------------------------------- 0.0/981.5 kB ? eta -:--:--\n",
      "     ---------- ----------------------------- 262.1/981.5 kB ? eta -:--:--\n",
      "     -------------------- ----------------- 524.3/981.5 kB 1.7 MB/s eta 0:00:01\n",
      "     ------------------------------ ------- 786.4/981.5 kB 1.2 MB/s eta 0:00:01\n",
      "     -------------------------------------- 981.5/981.5 kB 1.2 MB/s eta 0:00:00\n",
      "  Preparing metadata (setup.py): started\n",
      "  Preparing metadata (setup.py): finished with status 'done'\n",
      "Requirement already satisfied: numpy in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured) (1.24.3)\n",
      "Collecting rapidfuzz (from unstructured)\n",
      "  Downloading rapidfuzz-3.9.7-cp38-cp38-win_amd64.whl.metadata (12 kB)\n",
      "Requirement already satisfied: backoff in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured) (2.2.1)\n",
      "Requirement already satisfied: typing-extensions in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured) (4.13.2)\n",
      "Collecting unstructured-client (from unstructured)\n",
      "  Downloading unstructured_client-0.28.1-py3-none-any.whl.metadata (20 kB)\n",
      "Requirement already satisfied: wrapt in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured) (1.17.2)\n",
      "Requirement already satisfied: soupsieve>1.2 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from beautifulsoup4->unstructured) (2.5)\n",
      "Requirement already satisfied: marshmallow<4.0.0,>=3.18.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from dataclasses-json->unstructured) (3.22.0)\n",
      "Requirement already satisfied: typing-inspect<1,>=0.4.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from dataclasses-json->unstructured) (0.9.0)\n",
      "Requirement already satisfied: six in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from langdetect->unstructured) (1.16.0)\n",
      "Requirement already satisfied: click in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from nltk->unstructured) (8.1.8)\n",
      "Requirement already satisfied: joblib in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from nltk->unstructured) (1.4.2)\n",
      "Requirement already satisfied: regex>=2021.8.3 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from nltk->unstructured) (2024.11.6)\n",
      "Requirement already satisfied: tqdm in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from nltk->unstructured) (4.67.1)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from requests->unstructured) (3.3.2)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from requests->unstructured) (3.10)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from requests->unstructured) (2.2.3)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from requests->unstructured) (2024.8.30)\n",
      "Collecting aiofiles>=24.1.0 (from unstructured-client->unstructured)\n",
      "  Downloading aiofiles-24.1.0-py3-none-any.whl.metadata (10 kB)\n",
      "Collecting cryptography>=3.1 (from unstructured-client->unstructured)\n",
      "  Downloading cryptography-44.0.2-cp37-abi3-win_amd64.whl.metadata (5.7 kB)\n",
      "Collecting eval-type-backport<0.3.0,>=0.2.0 (from unstructured-client->unstructured)\n",
      "  Downloading eval_type_backport-0.2.2-py3-none-any.whl.metadata (2.2 kB)\n",
      "Requirement already satisfied: httpx>=0.27.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured-client->unstructured) (0.27.0)\n",
      "Collecting jsonpath-python<2.0.0,>=1.0.6 (from unstructured-client->unstructured)\n",
      "  Downloading jsonpath_python-1.0.6-py3-none-any.whl.metadata (12 kB)\n",
      "Requirement already satisfied: nest-asyncio>=1.6.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured-client->unstructured) (1.6.0)\n",
      "Collecting pydantic<2.10.0,>=2.9.2 (from unstructured-client->unstructured)\n",
      "  Downloading pydantic-2.9.2-py3-none-any.whl.metadata (149 kB)\n",
      "Collecting pypdf>=4.0 (from unstructured-client->unstructured)\n",
      "  Downloading pypdf-5.4.0-py3-none-any.whl.metadata (7.3 kB)\n",
      "Requirement already satisfied: python-dateutil<3.0.0,>=2.8.2 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured-client->unstructured) (2.9.0.post0)\n",
      "Requirement already satisfied: requests-toolbelt>=1.0.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from unstructured-client->unstructured) (1.0.0)\n",
      "Requirement already satisfied: cffi>=1.12 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from cryptography>=3.1->unstructured-client->unstructured) (1.17.1)\n",
      "Requirement already satisfied: anyio in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from httpx>=0.27.0->unstructured-client->unstructured) (4.2.0)\n",
      "Requirement already satisfied: httpcore==1.* in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from httpx>=0.27.0->unstructured-client->unstructured) (1.0.2)\n",
      "Requirement already satisfied: sniffio in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from httpx>=0.27.0->unstructured-client->unstructured) (1.3.0)\n",
      "Requirement already satisfied: h11<0.15,>=0.13 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from httpcore==1.*->httpx>=0.27.0->unstructured-client->unstructured) (0.14.0)\n",
      "Requirement already satisfied: packaging>=17.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from marshmallow<4.0.0,>=3.18.0->dataclasses-json->unstructured) (24.1)\n",
      "Requirement already satisfied: annotated-types>=0.6.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from pydantic<2.10.0,>=2.9.2->unstructured-client->unstructured) (0.7.0)\n",
      "Collecting pydantic-core==2.23.4 (from pydantic<2.10.0,>=2.9.2->unstructured-client->unstructured)\n",
      "  Downloading pydantic_core-2.23.4-cp38-none-win_amd64.whl.metadata (6.7 kB)\n",
      "Requirement already satisfied: mypy-extensions>=0.3.0 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from typing-inspect<1,>=0.4.0->dataclasses-json->unstructured) (1.0.0)\n",
      "Requirement already satisfied: colorama in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from click->nltk->unstructured) (0.4.6)\n",
      "Requirement already satisfied: pycparser in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from cffi>=1.12->cryptography>=3.1->unstructured-client->unstructured) (2.21)\n",
      "Requirement already satisfied: exceptiongroup>=1.0.2 in d:\\cachedata\\anaconda\\envs\\hanlp-python38\\lib\\site-packages (from anyio->httpx>=0.27.0->unstructured-client->unstructured) (1.2.0)\n",
      "Downloading unstructured-0.11.8-py3-none-any.whl (1.8 MB)\n",
      "   ---------------------------------------- 0.0/1.8 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.3/1.8 MB ? eta -:--:--\n",
      "   ----------- ---------------------------- 0.5/1.8 MB 1.7 MB/s eta 0:00:01\n",
      "   ----------- ---------------------------- 0.5/1.8 MB 1.7 MB/s eta 0:00:01\n",
      "   ----------------------------- ---------- 1.3/1.8 MB 1.6 MB/s eta 0:00:01\n",
      "   ----------------------------------- ---- 1.6/1.8 MB 1.5 MB/s eta 0:00:01\n",
      "   ----------------------------------- ---- 1.6/1.8 MB 1.5 MB/s eta 0:00:01\n",
      "   ----------------------------------- ---- 1.6/1.8 MB 1.5 MB/s eta 0:00:01\n",
      "   ---------------------------------------- 1.8/1.8 MB 1.1 MB/s eta 0:00:00\n",
      "Downloading chardet-5.2.0-py3-none-any.whl (199 kB)\n",
      "Downloading emoji-2.14.1-py3-none-any.whl (590 kB)\n",
      "   ---------------------------------------- 0.0/590.6 kB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/590.6 kB ? eta -:--:--\n",
      "   ----------------- ---------------------- 262.1/590.6 kB ? eta -:--:--\n",
      "   ----------------- ---------------------- 262.1/590.6 kB ? eta -:--:--\n",
      "   -------------------------------------- 590.6/590.6 kB 617.4 kB/s eta 0:00:00\n",
      "Downloading filetype-1.2.0-py2.py3-none-any.whl (19 kB)\n",
      "Downloading lxml-5.3.2-cp38-cp38-win_amd64.whl (3.8 MB)\n",
      "   ---------------------------------------- 0.0/3.8 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.8 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.8 MB ? eta -:--:--\n",
      "   -- ------------------------------------- 0.3/3.8 MB ? eta -:--:--\n",
      "   -- ------------------------------------- 0.3/3.8 MB ? eta -:--:--\n",
      "   -- ------------------------------------- 0.3/3.8 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.5/3.8 MB 409.6 kB/s eta 0:00:09\n",
      "   ----- ---------------------------------- 0.5/3.8 MB 409.6 kB/s eta 0:00:09\n",
      "   -------- ------------------------------- 0.8/3.8 MB 493.7 kB/s eta 0:00:07\n",
      "   -------- ------------------------------- 0.8/3.8 MB 493.7 kB/s eta 0:00:07\n",
      "   -------- ------------------------------- 0.8/3.8 MB 493.7 kB/s eta 0:00:07\n",
      "   -------- ------------------------------- 0.8/3.8 MB 493.7 kB/s eta 0:00:07\n",
      "   ---------- ----------------------------- 1.0/3.8 MB 423.0 kB/s eta 0:00:07\n",
      "   ---------- ----------------------------- 1.0/3.8 MB 423.0 kB/s eta 0:00:07\n",
      "   ---------- ----------------------------- 1.0/3.8 MB 423.0 kB/s eta 0:00:07\n",
      "   ---------- ----------------------------- 1.0/3.8 MB 423.0 kB/s eta 0:00:07\n",
      "   ------------- -------------------------- 1.3/3.8 MB 372.9 kB/s eta 0:00:07\n",
      "   ------------- -------------------------- 1.3/3.8 MB 372.9 kB/s eta 0:00:07\n",
      "   ------------- -------------------------- 1.3/3.8 MB 372.9 kB/s eta 0:00:07\n",
      "   ------------- -------------------------- 1.3/3.8 MB 372.9 kB/s eta 0:00:07\n",
      "   ---------------- ----------------------- 1.6/3.8 MB 352.5 kB/s eta 0:00:07\n",
      "   ---------------- ----------------------- 1.6/3.8 MB 352.5 kB/s eta 0:00:07\n",
      "   ---------------- ----------------------- 1.6/3.8 MB 352.5 kB/s eta 0:00:07\n",
      "   ---------------- ----------------------- 1.6/3.8 MB 352.5 kB/s eta 0:00:07\n",
      "   ---------------- ----------------------- 1.6/3.8 MB 352.5 kB/s eta 0:00:07\n",
      "   ---------------- ----------------------- 1.6/3.8 MB 352.5 kB/s eta 0:00:07\n",
      "   ---------------- ----------------------- 1.6/3.8 MB 352.5 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   ------------------- -------------------- 1.8/3.8 MB 304.1 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   --------------------- ------------------ 2.1/3.8 MB 256.4 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   ------------------------ --------------- 2.4/3.8 MB 232.6 kB/s eta 0:00:07\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   --------------------------- ------------ 2.6/3.8 MB 215.1 kB/s eta 0:00:06\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   ------------------------------ --------- 2.9/3.8 MB 193.5 kB/s eta 0:00:05\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   -------------------------------- ------- 3.1/3.8 MB 170.3 kB/s eta 0:00:04\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   ----------------------------------- ---- 3.4/3.8 MB 154.6 kB/s eta 0:00:03\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   -------------------------------------- - 3.7/3.8 MB 146.2 kB/s eta 0:00:01\n",
      "   ---------------------------------------- 3.8/3.8 MB 134.3 kB/s eta 0:00:00\n",
      "Downloading nltk-3.9.1-py3-none-any.whl (1.5 MB)\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.5 MB ? eta -:--:--\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   ------------- -------------------------- 0.5/1.5 MB 81.8 kB/s eta 0:00:12\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   -------------------- ------------------- 0.8/1.5 MB 83.3 kB/s eta 0:00:09\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   --------------------------- ------------ 1.0/1.5 MB 65.0 kB/s eta 0:00:08\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------- ----- 1.3/1.5 MB 53.9 kB/s eta 0:00:04\n",
      "   ---------------------------------------- 1.5/1.5 MB 52.0 kB/s eta 0:00:00\n",
      "Downloading python_iso639-2024.10.22-py3-none-any.whl (274 kB)\n",
      "Downloading python_magic-0.4.27-py2.py3-none-any.whl (13 kB)\n",
      "Downloading rapidfuzz-3.9.7-cp38-cp38-win_amd64.whl (1.7 MB)\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.3/1.7 MB ? eta -:--:--\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------ --------------------------- 0.5/1.7 MB 99.9 kB/s eta 0:00:12\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------ --------------------- 0.8/1.7 MB 97.0 kB/s eta 0:00:10\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------- -------------- 1.0/1.7 MB 81.2 kB/s eta 0:00:08\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------- -------- 1.3/1.7 MB 75.1 kB/s eta 0:00:05\n",
      "   ------------------------------------- -- 1.6/1.7 MB 77.0 kB/s eta 0:00:02\n",
      "   ------------------------------------- -- 1.6/1.7 MB 77.0 kB/s eta 0:00:02\n",
      "   ------------------------------------- -- 1.6/1.7 MB 77.0 kB/s eta 0:00:02\n",
      "   ------------------------------------- -- 1.6/1.7 MB 77.0 kB/s eta 0:00:02\n",
      "   ------------------------------------- -- 1.6/1.7 MB 77.0 kB/s eta 0:00:02\n",
      "   ---------------------------------------- 1.7/1.7 MB 78.3 kB/s eta 0:00:00\n",
      "Downloading tabulate-0.9.0-py3-none-any.whl (35 kB)\n",
      "Downloading unstructured_client-0.28.1-py3-none-any.whl (62 kB)\n",
      "Downloading aiofiles-24.1.0-py3-none-any.whl (15 kB)\n",
      "Downloading cryptography-44.0.2-cp37-abi3-win_amd64.whl (3.2 MB)\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   --- ------------------------------------ 0.3/3.2 MB ? eta -:--:--\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   ------ --------------------------------- 0.5/3.2 MB 114.9 kB/s eta 0:00:24\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   --------- ------------------------------ 0.8/3.2 MB 116.9 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ------------- -------------------------- 1.0/3.2 MB 103.6 kB/s eta 0:00:21\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ---------------- ----------------------- 1.3/3.2 MB 102.5 kB/s eta 0:00:19\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ------------------- -------------------- 1.6/3.2 MB 94.5 kB/s eta 0:00:18\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   ---------------------- ----------------- 1.8/3.2 MB 86.1 kB/s eta 0:00:16\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   -------------------------- ------------- 2.1/3.2 MB 84.0 kB/s eta 0:00:14\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   ----------------------------- ---------- 2.4/3.2 MB 82.3 kB/s eta 0:00:11\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   -------------------------------- ------- 2.6/3.2 MB 83.9 kB/s eta 0:00:08\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ----------------------------------- ---- 2.9/3.2 MB 81.3 kB/s eta 0:00:05\n",
      "   ---------------------------------------  3.1/3.2 MB 77.0 kB/s eta 0:00:01\n",
      "   ---------------------------------------  3.1/3.2 MB 77.0 kB/s eta 0:00:01\n",
      "   ---------------------------------------  3.1/3.2 MB 77.0 kB/s eta 0:00:01\n",
      "   ---------------------------------------  3.1/3.2 MB 77.0 kB/s eta 0:00:01\n",
      "   ---------------------------------------- 3.2/3.2 MB 77.2 kB/s eta 0:00:00\n",
      "Downloading eval_type_backport-0.2.2-py3-none-any.whl (5.8 kB)\n",
      "Downloading jsonpath_python-1.0.6-py3-none-any.whl (7.6 kB)\n",
      "Downloading pydantic-2.9.2-py3-none-any.whl (434 kB)\n",
      "Downloading pydantic_core-2.23.4-cp38-none-win_amd64.whl (1.9 MB)\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ---------------------------------------- 0.0/1.9 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.3/1.9 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.3/1.9 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.3/1.9 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.3/1.9 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.3/1.9 MB ? eta -:--:--\n",
      "   ----- ---------------------------------- 0.3/1.9 MB ? eta -:--:--\n",
      "   [71 lines of duplicated pip download-progress output trimmed]\n",
      "   ---------------------------------------- 1.9/1.9 MB 102.1 kB/s eta 0:00:00\n",
      "Downloading pypdf-5.4.0-py3-none-any.whl (302 kB)\n",
      "Building wheels for collected packages: langdetect\n",
      "  Building wheel for langdetect (setup.py): started\n",
      "  Building wheel for langdetect (setup.py): finished with status 'done'\n",
      "  Created wheel for langdetect: filename=langdetect-1.0.9-py3-none-any.whl size=993251 sha256=e255e8ccb5b411ada63e67f1ecc538f38012709f0a90de8099968fd39807b5b7\n",
      "  Stored in directory: c:\\users\\mi\\appdata\\local\\pip\\cache\\wheels\\13\\c7\\b0\\79f66658626032e78fc1a83103690ef6797d551cb22e56e734\n",
      "Successfully built langdetect\n",
      "Installing collected packages: filetype, tabulate, rapidfuzz, python-magic, python-iso639, pypdf, pydantic-core, lxml, langdetect, jsonpath-python, eval-type-backport, emoji, chardet, aiofiles, pydantic, nltk, cryptography, unstructured-client, unstructured\n",
      "  Attempting uninstall: pydantic-core\n",
      "    Found existing installation: pydantic_core 2.27.2\n",
      "    Uninstalling pydantic_core-2.27.2:\n",
      "      Successfully uninstalled pydantic_core-2.27.2\n",
      "  Attempting uninstall: pydantic\n",
      "    Found existing installation: pydantic 2.10.6\n",
      "    Uninstalling pydantic-2.10.6:\n",
      "      Successfully uninstalled pydantic-2.10.6\n",
      "Successfully installed aiofiles-24.1.0 chardet-5.2.0 cryptography-44.0.2 emoji-2.14.1 eval-type-backport-0.2.2 filetype-1.2.0 jsonpath-python-1.0.6 langdetect-1.0.9 lxml-5.3.2 nltk-3.9.1 pydantic-2.9.2 pydantic-core-2.23.4 pypdf-5.4.0 python-iso639-2024.10.22 python-magic-0.4.27 rapidfuzz-3.9.7 tabulate-0.9.0 unstructured-0.11.8 unstructured-client-0.28.1\n"
     ]
    }
   ],
   "source": [
    "# Use %pip (not !pip) so packages install into this kernel's environment,\n",
    "# and keep every install quiet to avoid flooding the saved output.\n",
    "%pip install -q chromadb\n",
    "%pip install -q tiktoken\n",
    "%pip install -qU langchain-openai\n",
    "%pip install -qU langchain-community\n",
    "%pip install -q unstructured"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "2fa3ff6e-dc2b-4844-a10e-4f51439db890",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "sk-****REDACTED****\n",
      "https://api.fe8.cn/v1\n",
      "None\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "from openai import OpenAI\n",
    "from dotenv import load_dotenv, find_dotenv\n",
    "\n",
    "load_dotenv(find_dotenv())  # load environment variables from the .env file\n",
    "\n",
    "api_key = os.environ.get('OPENAI_API_KEY')\n",
    "base_url = os.environ.get('OPENAI_BASE_URL')\n",
    "url = os.environ.get('OPENAI_API_BASE')\n",
    "# Security: never print the raw key -- it gets persisted in the notebook's saved output.\n",
    "print(f\"{api_key[:6]}...{api_key[-4:]}\" if api_key else None)\n",
    "print(base_url)\n",
    "print(url)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "f9c744aa-5621-4bf8-9975-92fae3ac123c",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "\n",
    "\n",
    "from langchain.schema import HumanMessage, SystemMessage, AIMessage\n",
    "\n",
    "# 这就是我们将要使用的语言模型。我们将在下一节中讨论我们正在做的事情\n",
    "llm = ChatOpenAI(model=\"gpt-4o\", temperature=0.7)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "9be471a9-d8d6-4934-b206-ef794067ac62",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='你可以尝试做一个新鲜的番茄意大利面，简单又美味！', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 22, 'prompt_tokens': 44, 'total_tokens': 66, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_ee1d74bde0', 'finish_reason': 'stop', 'logprobs': None}, id='run-8fe22812-51e2-412a-a9dc-d4b250b8e622-0', usage_metadata={'input_tokens': 44, 'output_tokens': 22, 'total_tokens': 66})"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "llm.invoke(\n",
    "    [\n",
    "        SystemMessage(content=\"你是一个很棒的人工智能机器人，能用一句话帮助用户知道该吃什么\"),\n",
    "        HumanMessage(content=\"我喜欢吃西红柿，应该吃什么呢？\")\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "47452824-3035-4e42-b9b6-317cc7c4d848",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='在尼斯，除了享受海滩，你还可以探索老城、参观马蒂斯博物馆，或在沿海大道散步。', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 34, 'prompt_tokens': 62, 'total_tokens': 96, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_ee1d74bde0', 'finish_reason': 'stop', 'logprobs': None}, id='run-ccd39eda-1dce-4909-b111-d6582d5ae69c-0', usage_metadata={'input_tokens': 62, 'output_tokens': 34, 'total_tokens': 96})"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "llm.invoke(\n",
    "    [\n",
    "        SystemMessage(content=\"你是一个友好的AI机器人，可以用简短的一句话帮助用户决定去哪里旅行\"),\n",
    "        HumanMessage(content=\"我喜欢海滩，我应该去哪里？\"),\n",
    "        AIMessage(content=\"你应该去法国尼斯\"),\n",
    "        HumanMessage(content=\"我到那里后还应该做些什么？\")\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "4100a6f0-6c53-4b63-9126-0fbe8aab3cc8",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='星期四之后是星期五。', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 8, 'prompt_tokens': 14, 'total_tokens': 22, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_ee1d74bde0', 'finish_reason': 'stop', 'logprobs': None}, id='run-46ea3097-5bb5-421d-a9f5-454c12d162f0-0', usage_metadata={'input_tokens': 14, 'output_tokens': 8, 'total_tokens': 22})"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "llm.invoke(\n",
    "    [\n",
    "        HumanMessage(content=\"星期四之后是星期几？\")\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "e0119f94-cd2b-48d5-b3a3-4c4014fa126f",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.schema import Document"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "97bdc43e-c204-4d28-886c-91ecaa7ff73e",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Document(metadata={'my_document_id': 234234, 'my_document_source': 'LangChain 论文', 'my_document_create_time': 1680013019}, page_content='这是我的文档。它包括了我从其他地方收集的信息')"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "Document(page_content=\"这是我的文档。它包括了我从其他地方收集的信息\",\n",
    "         metadata={\n",
    "             'my_document_id' : 234234,\n",
    "             'my_document_source' : \"LangChain 论文\",\n",
    "             'my_document_create_time' : 1680013019\n",
    "         })"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "2008a350-3d89-4cd2-b013-937329d87b09",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Document(page_content='这是我的文档。它包括了我从其他地方收集的信息')"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "Document(page_content=\"这是我的文档。它包括了我从其他地方收集的信息\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "c6324823-b505-4164-a4e1-d63fd9eb4354",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "\n",
    "llm = ChatOpenAI(model_name=\"gpt-3.5-turbo\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "886e23cf-2839-4c62-b009-0609f1224c3c",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='星期五之后是星期六。', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 11, 'prompt_tokens': 18, 'total_tokens': 29, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-3.5-turbo', 'system_fingerprint': 'fp_0165350fbb', 'finish_reason': 'stop', 'logprobs': None}, id='run-e64ae955-7809-4fad-bb62-daf0aa6b5d8d-0', usage_metadata={'input_tokens': 18, 'output_tokens': 11, 'total_tokens': 29})"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "llm.invoke(\"星期五之后是星期几？\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "814b938f-9770-4041-a9da-b6d04dd78be7",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "from langchain.schema import HumanMessage, SystemMessage, AIMessage\n",
    "\n",
    "llm = ChatOpenAI(model_name=\"gpt-3.5-turbo\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "b3fb8c1f-5d63-4d3f-aded-bd8db9b8bdc4",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='首先，你可以尝试把地图倒过来看，也许上海就在你脚下呢！哈哈哈！不过，实际上你可以乘坐飞机、火车或汽车去上海，看你喜欢哪种交通方式啦！', additional_kwargs={'refusal': None}, response_metadata={'token_usage': {'completion_tokens': 78, 'prompt_tokens': 62, 'total_tokens': 140, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-3.5-turbo', 'system_fingerprint': 'fp_0165350fbb', 'finish_reason': 'stop', 'logprobs': None}, id='run-258b5825-557c-4dec-afdb-5d738d0a3478-0', usage_metadata={'input_tokens': 62, 'output_tokens': 78, 'total_tokens': 140})"
      ]
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "llm.invoke(\n",
    "    [\n",
    "        SystemMessage(content=\"你是一个毫无帮助的人工智能机器人，无论用户说什么，它都会开玩笑\"),\n",
    "        HumanMessage(content=\"我想去上海，应该怎么办？\")\n",
    "    ]\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fbb0acf8-fe64-4c77-9f92-a37f60872e6f",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import openai\n",
    "import requests\n",
    "\n",
    "# Initialize the OpenAI client; the key comes from the environment (never hardcode it).\n",
    "client = openai.OpenAI(api_key=os.environ.get(\"OPENAI_API_KEY\"))\n",
    "\n",
    "# Call the chat completions API with an explicit timeout.\n",
    "try:\n",
    "    response = client.chat.completions.create(\n",
    "        model=\"gpt-3.5-turbo\",\n",
    "        messages=[{\"role\": \"user\", \"content\": \"Hello, ChatGPT!\"}],\n",
    "        timeout=600  # timeout in seconds (10 minutes); the old comment said 60s, which did not match the value\n",
    "    )\n",
    "    print(response)\n",
    "except openai.APIError as e:\n",
    "    # The openai>=1.0 client raises openai.APIError subclasses (APIConnectionError,\n",
    "    # APITimeoutError, ...), not requests.exceptions.RequestException -- catch them here.\n",
    "    print(\"APIConnectionError:\", e)\n",
    "except requests.exceptions.RequestException as e:\n",
    "    print(\"APIConnectionError:\", e)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "93b4873a-b36d-471f-b2e8-8b62a1674e21",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "content='' additional_kwargs={'function_call': {'arguments': '{\"location\":\"中国上海\"}', 'name': 'get_current_weather'}, 'refusal': None} response_metadata={'token_usage': {'completion_tokens': 17, 'prompt_tokens': 82, 'total_tokens': 99, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_ee1d74bde0', 'finish_reason': 'function_call', 'logprobs': None} id='run-65cbf926-e2e6-4396-87ec-efef3ee0591a-0' usage_metadata={'input_tokens': 82, 'output_tokens': 17, 'total_tokens': 99}\n"
     ]
    }
   ],
   "source": [
    "llm = ChatOpenAI(model_name=\"gpt-4o\")\n",
    "\n",
    "messages = [\n",
    "    SystemMessage(content=\"你是一个乐于助人的人工智能机器人\"),\n",
    "    HumanMessage(content=\"上海现在的天气怎么样？\")\n",
    "]\n",
    "\n",
    "functions=[{\n",
    "    \"name\": \"get_current_weather\",\n",
    "    \"description\": \"获取指定位置的当前天气\",\n",
    "    \"parameters\": {\n",
    "        \"type\": \"object\",\n",
    "        \"properties\": {\n",
    "           \"location\": {\n",
    "                \"type\": \"string\",\n",
    "                \"description\": \"城市和州，例如中国上海\"\n",
    "            },\n",
    "           \"unit\": {\n",
    "                \"type\": \"string\",\n",
    "                \"enum\": [\"celsius\", \"fahrenheit\"]\n",
    "             }\n",
    "         },\n",
    "       \"required\": [\"location\"]\n",
    "    }\n",
    "}]\n",
    "\n",
    "output = llm.invoke(input=messages, functions=functions)\n",
    "\n",
    "print(output)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "3f157864-954e-405c-ad4f-b6cd9a498bf0",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain_openai import OpenAIEmbeddings\n",
    "\n",
    "embeddings = OpenAIEmbeddings()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "7617a4b8-7df5-4562-b156-ac126095d1a6",
   "metadata": {},
   "outputs": [],
   "source": [
    "text = \"今天艳阳高照天气很舒服！\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "3072851b-0ae5-4d4c-8b1e-57bf585b5249",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Here's a sample: [0.017948752269148827, 0.008655799552798271, 0.006652832496911287, -0.021351424977183342, 0.006696891039609909]...\n",
      "Your embedding is length 1536\n"
     ]
    }
   ],
   "source": [
    "text_embedding = embeddings.embed_query(text)\n",
    "print (f\"Here's a sample: {text_embedding[:5]}...\")\n",
    "print (f\"Your embedding is length {len(text_embedding)}\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "634a6b44-818e-40d6-a9ce-fcae2f507c24",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\MI\\AppData\\Local\\Temp\\ipykernel_11272\\2407827430.py:11: LangChainDeprecationWarning: The method `BaseChatModel.__call__` was deprecated in langchain-core 0.1.7 and will be removed in 1.0. Use invoke instead.\n",
      "  print(llm(prompt))\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "content='这句话有逻辑上的问题。通常，一周的顺序是星期一、星期二、星期三、星期四、星期五、星期六和星期日。如果今天是星期一，那么明天应当是星期二，而不是星期三。因此，这句话在时间顺序上不正确。' additional_kwargs={'refusal': None} response_metadata={'token_usage': {'completion_tokens': 64, 'prompt_tokens': 23, 'total_tokens': 87, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_ee1d74bde0', 'finish_reason': 'stop', 'logprobs': None} id='run-41a460e5-1ae4-457f-8b30-0cebabd92b0c-0' usage_metadata={'input_tokens': 23, 'output_tokens': 64, 'total_tokens': 87}\n"
     ]
    }
   ],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "\n",
    "llm = ChatOpenAI(model_name=\"gpt-4o\")\n",
    "\n",
    "prompt = \"\"\"\n",
    "今天是星期一，明天是星期三。\n",
    "\n",
    "这句话有什么问题？\n",
    "\"\"\"\n",
    "\n",
    "# llm(prompt) (BaseChatModel.__call__) is deprecated since langchain-core 0.1.7\n",
    "# (see the LangChainDeprecationWarning in this cell's saved stderr) -- use invoke().\n",
    "print(llm.invoke(prompt))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "id": "2ee0fedd-822d-45ad-a46e-dd9a1f0d5344",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "最终提示词: \n",
      "我真的很想去上海旅行。我应该在那里做什么？\n",
      "\n",
      "用一个简短的句子回复\n",
      "\n",
      "-----------\n",
      "LLM 输出: content='参观外滩，品尝地道小吃，探索豫园。' additional_kwargs={'refusal': None} response_metadata={'token_usage': {'completion_tokens': 19, 'prompt_tokens': 28, 'total_tokens': 47, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-4o-2024-08-06', 'system_fingerprint': 'fp_ee1d74bde0', 'finish_reason': 'stop', 'logprobs': None} id='run-e6e1cbc4-be32-4821-b5e4-330bdd6ed04a-0' usage_metadata={'input_tokens': 28, 'output_tokens': 19, 'total_tokens': 47}\n"
     ]
    }
   ],
   "source": [
    "from langchain import PromptTemplate\n",
    "\n",
    "llm = ChatOpenAI(model_name=\"gpt-4o\")\n",
    "\n",
    "# Note \"location\" below -- a placeholder filled in with another value later\n",
    "template = \"\"\"\n",
    "我真的很想去{location}旅行。我应该在那里做什么？\n",
    "\n",
    "用一个简短的句子回复\n",
    "\"\"\"\n",
    "\n",
    "prompt = PromptTemplate(\n",
    "    input_variables=[\"location\"],\n",
    "    template=template,\n",
    ")\n",
    "\n",
    "final_prompt = prompt.format(location='上海')\n",
    "\n",
    "print (f\"最终提示词: {final_prompt}\")\n",
    "print (\"-----------\")\n",
    "# Calling the model directly is deprecated in langchain-core >= 0.1.7 -- use invoke().\n",
    "print (f\"LLM 输出: {llm.invoke(final_prompt)}\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "ee194f9b-b358-4dac-bfe0-e37a2575acc6",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.prompts.example_selector import SemanticSimilarityExampleSelector\n",
    "from langchain.vectorstores import Chroma\n",
    "from langchain.embeddings import OpenAIEmbeddings\n",
    "from langchain.prompts import FewShotPromptTemplate, PromptTemplate\n",
    "from langchain.llms import OpenAI\n",
    "\n",
    "llm = ChatOpenAI(model_name=\"gpt-4o\")\n",
    "\n",
    "example_prompt = PromptTemplate(\n",
    "    input_variables=[\"input\", \"output\"],\n",
    "    template=\"示例输入: {input}\\n示例输出: {output}\",\n",
    ")\n",
    "\n",
    "# 名词所在位置的示例\n",
    "examples = [\n",
    "    {\"input\": \"海盗\", \"output\": \"船\"},\n",
    "    {\"input\": \"飞行员\", \"output\": \"飞机\"},\n",
    "    {\"input\": \"司机\", \"output\": \"汽车\"},\n",
    "    {\"input\": \"树\", \"output\": \"地面\"},\n",
    "    {\"input\": \"鸟\", \"output\": \"巢\"},\n",
    "]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "350c0632-a5e1-48cf-a68e-bfb98a37a1e8",
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'example_selector' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[1;32mIn[21], line 3\u001b[0m\n\u001b[0;32m      1\u001b[0m similar_prompt \u001b[38;5;241m=\u001b[39m FewShotPromptTemplate(\n\u001b[0;32m      2\u001b[0m     \u001b[38;5;66;03m# 有助于选择示例的对象\u001b[39;00m\n\u001b[1;32m----> 3\u001b[0m     example_selector\u001b[38;5;241m=\u001b[39m\u001b[43mexample_selector\u001b[49m,\n\u001b[0;32m      4\u001b[0m \n\u001b[0;32m      5\u001b[0m     \u001b[38;5;66;03m# 你的提示词\u001b[39;00m\n\u001b[0;32m      6\u001b[0m     example_prompt\u001b[38;5;241m=\u001b[39mexample_prompt,\n\u001b[0;32m      7\u001b[0m \n\u001b[0;32m      8\u001b[0m     \u001b[38;5;66;03m# 将添加到提示顶部和底部的自定义内容\u001b[39;00m\n\u001b[0;32m      9\u001b[0m     prefix\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m给出物品通常出现的位置\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m     10\u001b[0m     suffix\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mInput: \u001b[39m\u001b[38;5;132;01m{noun}\u001b[39;00m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124mOutput:\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[0;32m     11\u001b[0m \n\u001b[0;32m     12\u001b[0m     \u001b[38;5;66;03m# 你的提示将收到什么输入\u001b[39;00m\n\u001b[0;32m     13\u001b[0m     input_variables\u001b[38;5;241m=\u001b[39m[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnoun\u001b[39m\u001b[38;5;124m\"\u001b[39m],\n\u001b[0;32m     14\u001b[0m )\n",
      "\u001b[1;31mNameError\u001b[0m: name 'example_selector' is not defined"
     ]
    }
   ],
   "source": [
    "similar_prompt = FewShotPromptTemplate(\n",
    "    # 有助于选择示例的对象\n",
    "    example_selector=example_selector,\n",
    "\n",
    "    # 你的提示词\n",
    "    example_prompt=example_prompt,\n",
    "\n",
    "    # 将添加到提示顶部和底部的自定义内容\n",
    "    prefix=\"给出物品通常出现的位置\",\n",
    "    suffix=\"Input: {noun}\\nOutput:\",\n",
    "\n",
    "    # 你的提示将收到什么输入\n",
    "    input_variables=[\"noun\"],\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "76a13ab9-3635-4840-b4fc-9cad6fd5d917",
   "metadata": {},
   "outputs": [],
   "source": [
    "# SemanticSimilarityExampleSelector 将通过语义含义选择与输入相似的示例\n",
    "\n",
    "example_selector = SemanticSimilarityExampleSelector.from_examples(\n",
    "    # 这是可供选择的示例列表。\n",
    "    examples,\n",
    "\n",
    "    # 这是用于生成嵌入的嵌入类，用于测量语义相似性。\n",
    "    OpenAIEmbeddings(),\n",
    "\n",
    "    # 这是用于存储嵌入并进行相似性搜索的 VectorStore 类。\n",
    "    Chroma,\n",
    "\n",
    "    # 这是要生成的示例数量。\n",
    "    k=1\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "331a1cc8-8a71-468d-a0eb-3b7dde3ecd89",
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'similar_prompt' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[1;32mIn[22], line 4\u001b[0m\n\u001b[0;32m      2\u001b[0m my_noun1 \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m学生\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m      3\u001b[0m my_noun2 \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m植物\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m----> 4\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[43msimilar_prompt\u001b[49m\u001b[38;5;241m.\u001b[39mformat(noun\u001b[38;5;241m=\u001b[39mmy_noun))\n\u001b[0;32m      5\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m==============\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m      6\u001b[0m \u001b[38;5;28mprint\u001b[39m(similar_prompt\u001b[38;5;241m.\u001b[39mformat(noun\u001b[38;5;241m=\u001b[39mmy_noun1))\n",
      "\u001b[1;31mNameError\u001b[0m: name 'similar_prompt' is not defined"
     ]
    }
   ],
   "source": [
    "my_noun = \"老虎\"\n",
    "my_noun1 = \"学生\"\n",
    "my_noun2 = \"植物\"\n",
    "print(similar_prompt.format(noun=my_noun))\n",
    "print(\"==============\")\n",
    "print(similar_prompt.format(noun=my_noun1))\n",
    "print(\"==============\")\n",
    "print(similar_prompt.format(noun=my_noun2))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "9eabdc63-9ed4-4b7a-ad9c-b9048f809aa1",
   "metadata": {},
   "outputs": [
    {
     "ename": "TypeError",
     "evalue": "'NoneType' object is not iterable",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[1;32mIn[24], line 25\u001b[0m\n\u001b[0;32m     23\u001b[0m \u001b[38;5;66;03m# 一个用于提示语言模型填充数据结构的查询。\u001b[39;00m\n\u001b[0;32m     24\u001b[0m prompt_and_model \u001b[38;5;241m=\u001b[39m prompt \u001b[38;5;241m|\u001b[39m model\n\u001b[1;32m---> 25\u001b[0m output \u001b[38;5;241m=\u001b[39m \u001b[43mprompt_and_model\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mquery\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43m可以讲个笑话给我听吗？\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m}\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m     26\u001b[0m parser\u001b[38;5;241m.\u001b[39minvoke(output)\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\runnables\\base.py:2879\u001b[0m, in \u001b[0;36mRunnableSequence.invoke\u001b[1;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[0;32m   2877\u001b[0m             \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m context\u001b[38;5;241m.\u001b[39mrun(step\u001b[38;5;241m.\u001b[39minvoke, \u001b[38;5;28minput\u001b[39m, config, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m   2878\u001b[0m         \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m-> 2879\u001b[0m             \u001b[38;5;28minput\u001b[39m \u001b[38;5;241m=\u001b[39m \u001b[43mcontext\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstep\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minvoke\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m   2880\u001b[0m \u001b[38;5;66;03m# finish the root run\u001b[39;00m\n\u001b[0;32m   2881\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:385\u001b[0m, in \u001b[0;36mBaseLLM.invoke\u001b[1;34m(self, input, config, stop, **kwargs)\u001b[0m\n\u001b[0;32m    375\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21minvoke\u001b[39m(\n\u001b[0;32m    376\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    377\u001b[0m     \u001b[38;5;28minput\u001b[39m: LanguageModelInput,\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    381\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    382\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28mstr\u001b[39m:\n\u001b[0;32m    383\u001b[0m     config \u001b[38;5;241m=\u001b[39m ensure_config(config)\n\u001b[0;32m    384\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m (\n\u001b[1;32m--> 385\u001b[0m         \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate_prompt\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    386\u001b[0m \u001b[43m            \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_convert_input\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43minput\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    387\u001b[0m \u001b[43m            \u001b[49m\u001b[43mstop\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    388\u001b[0m \u001b[43m            \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mcallbacks\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    389\u001b[0m \u001b[43m            
\u001b[49m\u001b[43mtags\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtags\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    390\u001b[0m \u001b[43m            \u001b[49m\u001b[43mmetadata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmetadata\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    391\u001b[0m \u001b[43m            \u001b[49m\u001b[43mrun_name\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrun_name\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    392\u001b[0m \u001b[43m            \u001b[49m\u001b[43mrun_id\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpop\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrun_id\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    393\u001b[0m \u001b[43m            \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    394\u001b[0m \u001b[43m        \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    395\u001b[0m         \u001b[38;5;241m.\u001b[39mgenerations[\u001b[38;5;241m0\u001b[39m][\u001b[38;5;241m0\u001b[39m]\n\u001b[0;32m    396\u001b[0m         
\u001b[38;5;241m.\u001b[39mtext\n\u001b[0;32m    397\u001b[0m     )\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:750\u001b[0m, in \u001b[0;36mBaseLLM.generate_prompt\u001b[1;34m(self, prompts, stop, callbacks, **kwargs)\u001b[0m\n\u001b[0;32m    742\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mgenerate_prompt\u001b[39m(\n\u001b[0;32m    743\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    744\u001b[0m     prompts: List[PromptValue],\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    747\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    748\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m LLMResult:\n\u001b[0;32m    749\u001b[0m     prompt_strings \u001b[38;5;241m=\u001b[39m [p\u001b[38;5;241m.\u001b[39mto_string() \u001b[38;5;28;01mfor\u001b[39;00m p \u001b[38;5;129;01min\u001b[39;00m prompts]\n\u001b[1;32m--> 750\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprompt_strings\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallbacks\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:944\u001b[0m, in \u001b[0;36mBaseLLM.generate\u001b[1;34m(self, prompts, stop, callbacks, tags, metadata, run_name, run_id, **kwargs)\u001b[0m\n\u001b[0;32m    929\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcache \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m get_llm_cache() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m) \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcache \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mFalse\u001b[39;00m:\n\u001b[0;32m    930\u001b[0m     run_managers \u001b[38;5;241m=\u001b[39m [\n\u001b[0;32m    931\u001b[0m         callback_manager\u001b[38;5;241m.\u001b[39mon_llm_start(\n\u001b[0;32m    932\u001b[0m             dumpd(\u001b[38;5;28mself\u001b[39m),\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    942\u001b[0m         )\n\u001b[0;32m    943\u001b[0m     ]\n\u001b[1;32m--> 944\u001b[0m     output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_generate_helper\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    945\u001b[0m \u001b[43m        \u001b[49m\u001b[43mprompts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_managers\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mbool\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mnew_arg_supported\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m    946\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    947\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m 
output\n\u001b[0;32m    948\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(missing_prompts) \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m:\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:787\u001b[0m, in \u001b[0;36mBaseLLM._generate_helper\u001b[1;34m(self, prompts, stop, run_managers, new_arg_supported, **kwargs)\u001b[0m\n\u001b[0;32m    785\u001b[0m     \u001b[38;5;28;01mfor\u001b[39;00m run_manager \u001b[38;5;129;01min\u001b[39;00m run_managers:\n\u001b[0;32m    786\u001b[0m         run_manager\u001b[38;5;241m.\u001b[39mon_llm_error(e, response\u001b[38;5;241m=\u001b[39mLLMResult(generations\u001b[38;5;241m=\u001b[39m[]))\n\u001b[1;32m--> 787\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m    788\u001b[0m flattened_outputs \u001b[38;5;241m=\u001b[39m output\u001b[38;5;241m.\u001b[39mflatten()\n\u001b[0;32m    789\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m manager, flattened_output \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(run_managers, flattened_outputs):\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:774\u001b[0m, in \u001b[0;36mBaseLLM._generate_helper\u001b[1;34m(self, prompts, stop, run_managers, new_arg_supported, **kwargs)\u001b[0m\n\u001b[0;32m    764\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_generate_helper\u001b[39m(\n\u001b[0;32m    765\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    766\u001b[0m     prompts: List[\u001b[38;5;28mstr\u001b[39m],\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    770\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    771\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m LLMResult:\n\u001b[0;32m    772\u001b[0m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m    773\u001b[0m         output \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m--> 774\u001b[0m             \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_generate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    775\u001b[0m \u001b[43m                \u001b[49m\u001b[43mprompts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    776\u001b[0m \u001b[43m                \u001b[49m\u001b[43mstop\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    777\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;66;43;03m# TODO: support multiple run managers\u001b[39;49;00m\n\u001b[0;32m    778\u001b[0m \u001b[43m                \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_managers\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mrun_managers\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m    
779\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    780\u001b[0m \u001b[43m            \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    781\u001b[0m             \u001b[38;5;28;01mif\u001b[39;00m new_arg_supported\n\u001b[0;32m    782\u001b[0m             \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate(prompts, stop\u001b[38;5;241m=\u001b[39mstop)\n\u001b[0;32m    783\u001b[0m         )\n\u001b[0;32m    784\u001b[0m     \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m    785\u001b[0m         \u001b[38;5;28;01mfor\u001b[39;00m run_manager \u001b[38;5;129;01min\u001b[39;00m run_managers:\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_openai\\llms\\base.py:361\u001b[0m, in \u001b[0;36mBaseOpenAI._generate\u001b[1;34m(self, prompts, stop, run_manager, **kwargs)\u001b[0m\n\u001b[0;32m    358\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124merror\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m    359\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(response\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124merror\u001b[39m\u001b[38;5;124m\"\u001b[39m))\n\u001b[1;32m--> 361\u001b[0m \u001b[43mchoices\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mextend\u001b[49m\u001b[43m(\u001b[49m\u001b[43mresponse\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mchoices\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    362\u001b[0m _update_token_usage(_keys, response, token_usage)\n\u001b[0;32m    363\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m system_fingerprint:\n",
      "\u001b[1;31mTypeError\u001b[0m: 'NoneType' object is not iterable"
     ]
    }
   ],
   "source": [
    "from langchain_core.output_parsers import PydanticOutputParser\n",
    "from langchain_core.prompts import PromptTemplate\n",
    "from langchain_openai import OpenAI\n",
    "from pydantic import BaseModel, Field, model_validator\n",
    "\n",
    "model = OpenAI(model_name=\"gpt-3.5-turbo-instruct\", temperature=0.0)\n",
    "\n",
    "# 定义你所需的数据结构。\n",
    "class Joke(BaseModel):\n",
    "    setup: str = Field(description=\"设置笑话的问题\")\n",
    "    punchline: str = Field(description=\"解答笑话的答案\")\n",
    "\n",
    "\n",
    "# 设置解析器并将指令注入提示模板。\n",
    "parser = PydanticOutputParser(pydantic_object=Joke)\n",
    "\n",
    "prompt = PromptTemplate(\n",
    "    template=\"回答用户的询问。\\n{format_instructions}\\n{query}\\n\",\n",
    "    input_variables=[\"query\"],\n",
    "    partial_variables={\"format_instructions\": parser.get_format_instructions()},\n",
    ")\n",
    "\n",
    "# 一个用于提示语言模型填充数据结构的查询。\n",
    "prompt_and_model = prompt | model\n",
    "output = prompt_and_model.invoke({\"query\": \"可以讲个笑话给我听吗？\"})\n",
    "parser.invoke(output)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "95d64f92-a694-41d1-903e-858bdb6b1b42",
   "metadata": {},
   "source": [
    "## 输出解析器方法 2函数调用模型\n",
    "当Open AI 发布函数调用时，游戏发生了变化。\n",
    "他们专门训练模型以输出结构化数据。指定Pydantic(一种python中的数据验证模式)模式并获得结构化输出变得非常容易\n",
    "\n",
    "有很多方法可以定义你的模式，我更喜欢使用Pydantic模型，因为它们的组织性很强。请参考OpenAI的文档了了解其他方法。\n",
    "\n",
    "使用此方法需要使用支持函数调用的模型，我将使用gpt4-0613\n",
    "\n",
    "## 示例1 : 简单"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "66914345-bb5e-4803-aa62-61b564cf3c94",
   "metadata": {},
   "outputs": [],
   "source": [
    "from pydantic import BaseModel, Field\n",
    "from typing import Optional\n",
    "\n",
    "class Person(BaseModel):\n",
    "    \"\"\"关于一个人的身份信息。\"\"\"\n",
    "\n",
    "    name: str = Field(..., description=\"该人的姓名\")\n",
    "    age: int = Field(..., description=\"该人的年龄\")\n",
    "    fav_food: Optional[str] = Field(None, description=\"该人最喜欢的食物\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "id": "523d078b-3e7e-48d6-854b-15581d005534",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Person(name='小丽', age=13, fav_food=None)"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain_openai import ChatOpenAI\n",
    "\n",
    "llm = ChatOpenAI(model_name=\"gpt-4o\")\n",
    "\n",
    "structured_llm = llm.with_structured_output(Person)\n",
    "structured_llm.invoke(\n",
    "    \"小丽 13岁，小明刚满 12 岁并且喜欢吃菠菜。 小军比小丽大 10 岁。\"\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "id": "234a20a6-3b5a-4194-a9dd-2789c829d925",
   "metadata": {},
   "outputs": [],
   "source": [
    "from typing import Sequence\n",
    "\n",
    "class People(BaseModel):\n",
    "    \"\"\"识别文本中所有人物的信息。\"\"\"\n",
    "\n",
    "    people: Sequence[Person] = Field(..., description=\"文本中的人物信息\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "f365fb35-b010-4a7e-ae3e-31b3aa1cadc2",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "People(people=[Person(name='小丽', age=13, fav_food=None), Person(name='小明', age=12, fav_food='菠菜'), Person(name='小军', age=23, fav_food=None)])"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "structured_llm = llm.with_structured_output(People)\n",
    "structured_llm.invoke(\n",
    "    \"小丽 13岁，小明刚满 12 岁并且喜欢吃菠菜。 小军比小丽大 10 岁。\"\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "5ba944fa-fccc-4607-9b69-19db617544b9",
   "metadata": {},
   "outputs": [],
   "source": [
    "import enum\n",
    "\n",
    "llm = ChatOpenAI(model_name=\"gpt-4o\")\n",
    "\n",
    "class Product(str, enum.Enum):\n",
    "    CRM = \"CRM\"\n",
    "    VIDEO_EDITING = \"视频剪辑\"\n",
    "    HARDWARE = \"硬件\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "id": "3b392686-6d5f-4fa4-a331-cf2c057422ac",
   "metadata": {},
   "outputs": [],
   "source": [
    "class Products(BaseModel):\n",
    "    \"\"\"识别文本中提到的产品\"\"\"\n",
    "\n",
    "    products: Sequence[Product] = Field(..., description=\"文中提到的产品\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "8f6416fd-75e7-4ca8-bc9a-f14e70888e3b",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Products(products=[<Product.CRM: 'CRM'>, <Product.HARDWARE: '硬件'>, <Product.VIDEO_EDITING: '视频剪辑'>])"
      ]
     },
     "execution_count": 31,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "structured_llm = llm.with_structured_output(Products)\n",
    "structured_llm.invoke(\n",
    "    \"这个演示中的 CRM 很棒。喜欢硬件。麦克风也很酷。喜欢视频编辑\"\n",
    ")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "3ef70a86-d1a4-4c90-9f12-aa3d83e7d51c",
   "metadata": {},
   "source": [
    "## 索引 - 将文档结构化为LLM可以使用它们的格式\n",
    "文档加载器\n",
    "从其他来源导如数据的一洗了简单方法，与Open AI 插件特别是检索插件共享功能\n",
    "\n",
    "可以访问下面的网站看文档加载器的更多信息（https://python.langchain.com/docs/how_to/#document-loaders）\n",
    "\n",
    "[Llama index另外一个类似langchain的框架,主要是索引和提取]（https://llamahub.ai/）上还有更多信息\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "964ed82d-aed9-4eec-965d-619c1466652e",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "USER_AGENT environment variable not set, consider setting it to identify your requests.\n"
     ]
    }
   ],
   "source": [
    "from langchain.document_loaders import HNLoader"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "id": "0994c302-f49f-4572-8883-c8374b4a38d9",
   "metadata": {},
   "outputs": [],
   "source": [
    "loader = HNLoader(\"https://news.ycombinator.com/item?id=34422627\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "7fc2663f-afab-461f-a6ee-45e30b72c7e0",
   "metadata": {},
   "outputs": [],
   "source": [
    "data = loader.load()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "a5c35fc7-d2e4-4b12-b4ff-48843de2d54b",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "发现 76 评论\n",
      "以下是一个示例:\n",
      "\n",
      "Ozzie_osman on Jan 18, 2023  \n",
      "             | next [–] \n",
      "\n",
      "LangChain is awesome. For people not sure what it's doing, large language models (LLMs) are veOzzie_osman on Jan 18, 2023  \n",
      "             | parent | next [–] \n",
      "\n",
      "Also, another library to check out is GPT Index (https://github.com/jerryjliu/gpt_ind\n"
     ]
    }
   ],
   "source": [
    "print (f\"发现 {len(data)} 评论\")\n",
    "print (f\"以下是一个示例:\\n\\n{''.join([x.page_content[:150] for x in data[:2]])}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "1c665857-cba7-498e-abed-4f0bf6d7e824",
   "metadata": {},
   "source": [
    "## 电子书"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "id": "9e42819d-70de-4b18-b254-29db7000a7f9",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.document_loaders import GutenbergLoader\n",
    "\n",
    "loader = GutenbergLoader(\"https://www.gutenberg.org/cache/epub/2148/pg2148.txt\")\n",
    "\n",
    "data = loader.load()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "id": "817e5250-ce2b-4640-b28a-4c10802676d5",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "o.—_Seneca_.\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "      At Paris, just after dark one gusty evening in the autumn of 18-,\n",
      "\n",
      "\n",
      "      I was enjoying the twofold l\n"
     ]
    }
   ],
   "source": [
    "print(data[0].page_content[1855:1984])"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ce204c3b-449c-4825-affa-0cd789973604",
   "metadata": {},
   "source": [
    "## URL和网页"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "id": "b2482e8d-4879-49e6-b009-81340dccd618",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'New:  \\r\\n\\r\\n What to Do  |\\r\\n Wokeness  |\\r\\n Founder Mode \\r\\n \\r\\n \\r\\n \\r\\n \\r\\n \\r\\n Want to start a startup?  Get funded by  Y Combinator .\\r\\n \\r\\n \\r\\n \\r\\n\\r\\n \\n\\r\\n \\r\\n \\r\\n \\r\\n© mmxxv pg'"
      ]
     },
     "execution_count": 39,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain.document_loaders import UnstructuredURLLoader\n",
    "\n",
    "urls = [\n",
    "    \"http://www.paulgraham.com/\",\n",
    "]\n",
    "\n",
    "loader = UnstructuredURLLoader(urls=urls)\n",
    "\n",
    "data = loader.load()\n",
    "\n",
    "data[0].page_content"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "4c018d12-66bd-4c52-bf76-b2d83ed3af98",
   "metadata": {},
   "source": [
    "## 文本分割器\n",
    "通常的文档对于基本上所有的LLM来说都太长（就像一本书）。因此需要将其分割成块。文本分割器可帮助完成此操作。\n",
    "\n",
    "可以通过多种方式将文本分割成块，可以访问下面的链接（https://python.langchain.com/v0.1/docs/modules/data_connection/document_transformers/code_splitter）中不同的方法来确定哪种方法最合适。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "id": "43ac6638-a812-42cf-b984-8c9445de31b9",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.text_splitter import RecursiveCharacterTextSplitter"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "id": "ff8c70fb-3af8-4173-8672-f0bc38616e2b",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "你有 1 个文档\n"
     ]
    }
   ],
   "source": [
    "# 这是一个很长的文档\n",
    "# with open('./data/Xiyouji.txt') as f:\n",
    "# ✅ 指定编码\n",
    "with open('./data/Xiyouji.txt', 'r', encoding='utf-8') as f: \n",
    "    pg_work = f.read()\n",
    "\n",
    "print (f\"你有 {len([pg_work])} 个文档\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 44,
   "id": "8ba02d11-4a8b-423f-8e7d-ac92357d2f9a",
   "metadata": {},
   "outputs": [],
   "source": [
    "text_splitter = RecursiveCharacterTextSplitter(\n",
    "    # 定义chunk size.\n",
    "    chunk_size = 500,\n",
    "    chunk_overlap  = 20,\n",
    ")\n",
    "\n",
    "texts = text_splitter.create_documents([pg_work])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "id": "f51a6b21-cf6d-43b8-8923-793e41fecb2e",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "你有 1784 个文档\n"
     ]
    }
   ],
   "source": [
    "print (f\"你有 {len(texts)} 个文档\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "id": "374d54f8-3ecf-4713-96e9-639c8086a06c",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "前言:\n",
      "第一回     靈根育孕源流出　心性修持大道生\n",
      "\n",
      "\n",
      "　　詩曰：\n",
      "　　　　混沌未分天地亂，茫茫渺渺無人見。\n",
      "　　　　自從盤古破鴻濛，開闢從茲清濁辨。\n",
      "　　　　覆載群生仰至仁，發明萬物皆成善。\n",
      "　　　　欲知造化會元功，須看西遊釋厄傳。 \n",
      "\n",
      "蓋聞天地之數，有十二萬九千六百歲為一元。將一元分為十二會，乃子、丑、寅\n",
      "、卯、辰、巳、午、未、申、酉、戌、亥之十二支也。每會該一萬八百歲。且就\n",
      "一日而論：子時得陽氣，而丑則雞鳴﹔寅不通光，而卯則日出﹔辰時食後，而巳\n",
      "則挨排﹔日午天中，而未則西蹉﹔申時晡，而日落酉，戌黃昏，而人定亥。譬於\n",
      "大數，若到戌會之終，則天地昏曚而萬物否矣。再去五千四百歲，交亥會之初，\n",
      "則當黑暗，而兩間人物俱無矣，故曰混沌。又五千四百歲，亥會將終，貞下起元\n",
      "，近子之會，而復逐漸開明。邵康節曰：：「冬至子之半，天心無改移。一陽初\n",
      "動處，萬物未生時。」到此，天始有根。再五千四百歲，正當子會，輕清上騰，\n",
      "有日，有月，有星，有辰。日、月、星、辰，謂之四象。故曰，天開於子。又經\n",
      "五千四百歲，子會將終，近丑之會，而逐漸堅實。《易》曰：「大哉乾元！至哉\n",
      "坤元！萬物資生，乃順承天。」至此，地始凝結。再五千四百歲，正當丑會，重\n",
      "濁下凝，有水，有火，有山，有石，有土。水、火、山、石、土，謂之五形。故\n",
      "曰，地闢於丑。又經五千四百歲，丑會終而寅會之初，發生萬物。曆曰：「天氣\n"
     ]
    }
   ],
   "source": [
    "print (\"前言:\")\n",
    "print (texts[0].page_content, \"\\n\")\n",
    "print (texts[1].page_content)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "id": "5937b81a-68ca-42af-bbf5-8dccbfe2d671",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.document_loaders import TextLoader\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "from langchain.vectorstores import FAISS\n",
    "from langchain.embeddings import OpenAIEmbeddings\n",
    "\n",
    "loader = TextLoader('./data/worked.txt')\n",
    "documents = loader.load()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "f7e59741-da9b-4388-86a3-443500188ea3",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Get your splitter ready\n",
    "text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=50)\n",
    "\n",
    "# Split your docs into texts\n",
    "texts = text_splitter.split_documents(documents)\n",
    "\n",
    "api_key = os.environ.get('OPENAI_API_KEY')\n",
    "base_url = os.environ.get('OPENAI_BASE_URL')\n",
    "# Get embedding engine ready\n",
    "embeddings = OpenAIEmbeddings(openai_api_key=api_key)\n",
    "\n",
    "# Embedd your texts\n",
    "db = FAISS.from_documents(texts, embeddings)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "73ea3833-28c8-4cb4-a52b-509a9da42c04",
   "metadata": {},
   "outputs": [],
   "source": [
    "import faiss  # 若无报错则安装成功"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "82b8d6d1-f7c9-4ad7-b1ea-a650c4c95698",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Init your retriever. Asking for just 1 document back\n",
    "retriever = db.as_retriever()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "a971f08f-f2b4-4d06-927f-16e1226b61f7",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "VectorStoreRetriever(tags=['FAISS', 'OpenAIEmbeddings'], vectorstore=<langchain_community.vectorstores.faiss.FAISS object at 0x0000027891520BE0>)"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "retriever"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "id": "f4426bf6-2717-4bdd-8e6e-20e59e6852b4",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\MI\\AppData\\Local\\Temp\\ipykernel_11272\\1946296395.py:1: LangChainDeprecationWarning: The method `BaseRetriever.get_relevant_documents` was deprecated in langchain-core 0.1.46 and will be removed in 1.0. Use invoke instead.\n",
      "  docs = retriever.get_relevant_documents(\"what types of things did the author want to build?\")\n"
     ]
    }
   ],
   "source": [
    "docs = retriever.get_relevant_documents(\"what types of things did the author want to build?\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "deab3644-02cd-466d-982a-c0db71d0787d",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\MI\\AppData\\Local\\Temp\\ipykernel_10032\\3034638982.py:1: LangChainDeprecationWarning: The method `BaseRetriever.get_relevant_documents` was deprecated in langchain-core 0.1.46 and will be removed in 1.0. Use invoke instead.\n",
      "  docs = retriever.get_relevant_documents(\"西游记孙悟空是怎么诞生的?\")\n"
     ]
    }
   ],
   "source": [
    "docs = retriever.get_relevant_documents(\"西游记孙悟空是怎么诞生的?\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "id": "ff7aebe8-9f5e-4789-8bcb-b2fdff6ed181",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "February 2021Before college the two main things I worked on, outside of school,\n",
      "were writing and programming. I didn't write essays. I wrote what\n",
      "beginning writers were supposed to write then, and pro\n",
      "\n",
      "wasn't in 1993. It's called Yorkville, and that was my new home.\n",
      "Now I was a New York artist 聴 in the strictly technical sense of\n",
      "making paintings and living in New York.I was nervous about money, bec\n"
     ]
    }
   ],
   "source": [
    "print(\"\\n\\n\".join([x.page_content[:200] for x in docs[:2]]))"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ea58a2f6-1ce0-466a-9908-bfb301e12d0d",
   "metadata": {},
   "source": [
    "## VectorStores\n",
    "用于存储向量的数据库。最流行的是Pinecone和Weaviate 。OpenAIs retriever文档上有更多示例。Chroma 哈FAISS易于本地使用。\n",
    "\n",
    "从概念上讲，将它们视为带入（向量）列和元数据列的表格。\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "952d8953-dace-4fd8-9530-76105639d3a6",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\MI\\AppData\\Local\\Temp\\ipykernel_10032\\2882706404.py:16: LangChainDeprecationWarning: The class `OpenAIEmbeddings` was deprecated in LangChain 0.0.9 and will be removed in 1.0. An updated version of the class exists in the langchain-openai package and should be used instead. To use it run `pip install -U langchain-openai` and import as `from langchain_openai import OpenAIEmbeddings`.\n",
      "  embeddings = OpenAIEmbeddings(openai_api_key=api_key)\n"
     ]
    }
   ],
   "source": [
    "from langchain.document_loaders import TextLoader\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "from langchain.vectorstores import FAISS\n",
    "from langchain.embeddings import OpenAIEmbeddings\n",
    "\n",
    "loader = TextLoader('./data/worked.txt')\n",
    "documents = loader.load()\n",
    "\n",
    "# Get your splitter ready\n",
    "text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=50)\n",
    "\n",
    "# Split your docs into texts\n",
    "texts = text_splitter.split_documents(documents)\n",
    "api_key = os.environ.get('OPENAI_API_KEY')\n",
    "# Get embedding engine ready\n",
    "embeddings = OpenAIEmbeddings(openai_api_key=api_key)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 60,
   "id": "e432c3f6-795b-4838-b62d-25a4d52662e7",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "You have 78 documents\n"
     ]
    }
   ],
   "source": [
    "print (f\"You have {len(texts)} documents\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 61,
   "id": "56bd0691-a205-440d-869b-91aad810c83a",
   "metadata": {},
   "outputs": [],
   "source": [
    "embedding_list = embeddings.embed_documents([text.page_content for text in texts])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 62,
   "id": "f708ef79-ed6c-428e-95d5-024b9107316a",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "You have 78 embeddings\n",
      "Here's a sample of one: [-0.0015781445012143602, -0.010170830445327557, -0.012978686991460873]...\n"
     ]
    }
   ],
   "source": [
    "print (f\"You have {len(embedding_list)} embeddings\")\n",
    "print (f\"Here's a sample of one: {embedding_list[0][:3]}...\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "2d485ede-5858-4229-aad1-f5e652272009",
   "metadata": {},
   "source": [
    "## 记忆\n",
    "帮助LLM记住信息。\n",
    "\n",
    "记忆是一个有点宽泛的术语。它可以简单到记住你过去聊过的信息，也可以更复杂地检索信息。\n",
    "\n",
    "我们将把它放在聊天消息用例中。这将用于聊天机器人。\n",
    "\n",
    "记忆有很多种类型，可以访问下面的链接（https://python.langchain.com/v0.1/docs/modules/memory/）,看看哪一种适合特定的用例。\n",
    "\n",
    "聊天消息历史记录"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "6efc5d6d-93f6-4dcc-a40f-1cd5abc4b195",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\MI\\AppData\\Local\\Temp\\ipykernel_10032\\3482737572.py:6: LangChainDeprecationWarning: The class `ChatOpenAI` was deprecated in LangChain 0.0.10 and will be removed in 1.0. An updated version of the class exists in the langchain-openai package and should be used instead. To use it run `pip install -U langchain-openai` and import as `from langchain_openai import ChatOpenAI`.\n",
      "  chat = ChatOpenAI(temperature=0, openai_api_key=api_key)\n"
     ]
    }
   ],
   "source": [
    "from langchain.memory import ChatMessageHistory\n",
    "from langchain.chat_models import ChatOpenAI\n",
    "\n",
    "api_key = os.environ.get('OPENAI_API_KEY')\n",
    "\n",
    "chat = ChatOpenAI(temperature=0, openai_api_key=api_key)\n",
    "\n",
    "history = ChatMessageHistory()\n",
    "\n",
    "history.add_ai_message(\"hi!\")\n",
    "\n",
    "history.add_user_message(\"what is the capital of france?\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "id": "1df323b2-1845-40f4-bb90-7790f2c93e5b",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[AIMessage(content='hi!'),\n",
       " HumanMessage(content='what is the capital of france?')]"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "history.messages"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 66,
   "id": "5a200096-50c3-47a2-a7e1-9176ae9d72b9",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "AIMessage(content='The capital of France is Paris.', response_metadata={'token_usage': {'completion_tokens': 7, 'prompt_tokens': 20, 'total_tokens': 27, 'completion_tokens_details': None, 'prompt_tokens_details': None}, 'model_name': 'gpt-3.5-turbo', 'system_fingerprint': 'fp_0165350fbb', 'finish_reason': 'stop', 'logprobs': None}, id='run-cdd60a98-6e62-421e-a4cc-f8a97f2b25ad-0')"
      ]
     },
     "execution_count": 66,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "ai_response = chat(history.messages)\n",
    "ai_response"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 67,
   "id": "ce23bd9e-d4ef-41ec-9c8f-3cabe2216038",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[AIMessage(content='hi!'),\n",
       " HumanMessage(content='what is the capital of france?'),\n",
       " AIMessage(content='The capital of France is Paris.')]"
      ]
     },
     "execution_count": 67,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "history.add_ai_message(ai_response.content)\n",
    "history.messages"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "4b8a12e8-ff0e-4e72-85d0-c2bf60992fdf",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "C:\\Users\\MI\\AppData\\Local\\Temp\\ipykernel_10032\\184067308.py:6: LangChainDeprecationWarning: The class `OpenAI` was deprecated in LangChain 0.0.10 and will be removed in 1.0. An updated version of the class exists in the langchain-openai package and should be used instead. To use it run `pip install -U langchain-openai` and import as `from langchain_openai import OpenAI`.\n",
      "  llm = OpenAI(temperature=1, openai_api_key=api_key)\n"
     ]
    }
   ],
   "source": [
    "from langchain.llms import OpenAI\n",
    "from langchain.chains import LLMChain\n",
    "from langchain.prompts import PromptTemplate\n",
    "from langchain.chains import SimpleSequentialChain\n",
    "\n",
    "llm = OpenAI(temperature=1, openai_api_key=api_key)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3e5d4f86-0d86-4b18-898e-50f0ff3e94bf",
   "metadata": {},
   "outputs": [],
   "source": [
    "template = \"\"\"Your job is to come up with a classic dish from the area that the users suggests.\n",
    "% USER LOCATION\n",
    "{user_location}\n",
    "\n",
    "YOUR RESPONSE:\n",
    "\"\"\"\n",
    "prompt_template = PromptTemplate(input_variables=[\"user_location\"], template=template)\n",
    "\n",
    "# Holds my 'location' chain\n",
    "location_chain = LLMChain(llm=llm, prompt=prompt_template)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "483e3c02-dbbd-4870-933d-8d086a6d0ad4",
   "metadata": {},
   "outputs": [],
   "source": [
    "template = \"\"\"Given a meal, give a short and simple recipe on how to make that dish at home.\n",
    "% MEAL\n",
    "{user_meal}\n",
    "\n",
    "YOUR RESPONSE:\n",
    "\"\"\"\n",
    "prompt_template = PromptTemplate(input_variables=[\"user_meal\"], template=template)\n",
    "\n",
    "# Holds my 'meal' chain\n",
    "meal_chain = LLMChain(llm=llm, prompt=prompt_template)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "b22baaa6-d375-41b7-939d-6d9beee3d434",
   "metadata": {},
   "outputs": [],
   "source": [
    "overall_chain = SimpleSequentialChain(chains=[location_chain, meal_chain], verbose=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "1e8aea88-cb89-4abf-b3dc-c11ddce0c07e",
   "metadata": {},
   "outputs": [],
   "source": [
    "review = overall_chain.run(\"上海\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "8a4d922f-2474-4ab0-b59e-ef475ab3db86",
   "metadata": {},
   "source": [
    "## 链\n",
    "自动组合不同的LLM调用和操作\n",
    "\n",
    "例如：摘要#1、摘要#2、摘要#3 > 最终摘要\n",
    "\n",
    "可以查看下面的链接（https://python.langchain.com/v0.1/modules/chains），搜索看看哪个链最适合自己的用例。\n"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "14ffaa8a-be9d-46c1-81ea-4d75dd4435c9",
   "metadata": {},
   "source": [
    "1、简单顺序链 <br/>\n",
    "简单顺序链可以将一个LLM调用的输出用作另一个链的输入，适合分解任务（让LLM一次只完成一项任务）。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "id": "d6dbda66-d3f3-4273-86fa-1af2ba45bfdf",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Error in StdOutCallbackHandler.on_chain_start callback: AttributeError(\"'NoneType' object has no attribute 'get'\")\n",
      "Error in StdOutCallbackHandler.on_chain_start callback: AttributeError(\"'NoneType' object has no attribute 'get'\")\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"第一回     靈根育孕源流出　心性修持大道生\n",
      "\n",
      "\n",
      "　　詩曰：\n",
      "　　　　混沌未分天地亂，茫茫渺渺無人見。\n",
      "　　　　自從盤古破鴻濛，開闢從茲清濁辨。\n",
      "　　　　覆載群生仰至仁，發明萬物皆成善。\n",
      "　　　　欲知造化會元功，須看西遊釋厄傳。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"蓋聞天地之數，有十二萬九千六百歲為一元。將一元分為十二會，乃子、丑、寅\n",
      "、卯、辰、巳、午、未、申、酉、戌、亥之十二支也。每會該一萬八百歲。且就\n",
      "一日而論：子時得陽氣，而丑則雞鳴﹔寅不通光，而卯則日出﹔辰時食後，而巳\n",
      "則挨排﹔日午天中，而未則西蹉﹔申時晡，而日落酉，戌黃昏，而人定亥。譬於\n",
      "大數，若到戌會之終，則天地昏曚而萬物否矣。再去五千四百歲，交亥會之初，\n",
      "則當黑暗，而兩間人物俱無矣，故曰混沌。又五千四百歲，亥會將終，貞下起元\n",
      "，近子之會，而復逐漸開明。邵康節曰：：「冬至子之半，天心無改移。一陽初\n",
      "動處，萬物未生時。」到此，天始有根。再五千四百歲，正當子會，輕清上騰，\n",
      "有日，有月，有星，有辰。日、月、星、辰，謂之四象。故曰，天開於子。又經\n",
      "五千四百歲，子會將終，近丑之會，而逐漸堅實。《易》曰：「大哉乾元！至哉\n",
      "坤元！萬物資生，乃順承天。」至此，地始凝結。再五千四百歲，正當丑會，重\n",
      "濁下凝，有水，有火，有山，有石，有土。水、火、山、石、土，謂之五形。故\n",
      "曰，地闢於丑。又經五千四百歲，丑會終而寅會之初，發生萬物。曆曰：「天氣\n",
      "下降，地氣上升﹔天地交合，群物皆生。」至此，天清地爽，陰陽交合。再五千\n",
      "四百歲，子會將終，近丑之會，而逐漸堅實。《易》曰：「大哉乾元！至哉坤元\n",
      "！萬物資生，乃順承天。」至此，地始凝結。再五千四百歲，正當丑會，重濁下\n",
      "凝，有水，有火，有山，有石，有土。水、火、山、石、土，謂之五形。故曰，\n",
      "地闢於丑。又經五千四百歲，丑會終而寅會之初，發生萬物。曆曰：「天氣下降\n",
      "，地氣上升﹔天地交合，群物皆生。」至此，天清地爽，陰陽交合。再五千四百\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"，地氣上升﹔天地交合，群物皆生。」至此，天清地爽，陰陽交合。再五千四百\n",
      "歲，正當寅會，生人，生獸，生禽，正謂天地人，三才定位。故曰，人生於寅。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"感盤古開闢，三皇治世，五帝定倫，世界之間，遂分為四大部洲：曰東勝神洲，\n",
      "曰西牛賀洲，曰南贍部洲，曰北俱蘆洲。這部書單表東勝神洲。海外有一國土，\n",
      "名曰傲來國。國近大海，海中有一座名山，喚為花果山。此山乃十洲之祖脈，三\n",
      "島之來龍，自開清濁而立，鴻濛判後而成。真個好山！有詞賦為證。賦曰：勢鎮\n",
      "汪洋，威寧瑤海。勢鎮汪洋，潮湧銀山魚入穴﹔威寧瑤海，波翻雪浪蜃離淵。水\n",
      "火方隅高積上，東海之處聳崇巔。丹崖怪石，削壁奇峰。丹崖上，彩鳳雙鳴﹔削\n",
      "壁前，麒麟獨臥。峰頭時聽錦雞鳴，石窟每觀龍出入。林中有壽鹿仙狐，樹上有\n",
      "靈禽玄鶴。瑤草奇花不謝，青松翠柏長春。仙桃常結果，修竹每留雲。一條澗壑\n",
      "籐蘿密，四面原堤草色新。正是百川會處擎天柱，萬劫無移大地根。\n",
      "\n",
      "那座山正當頂上，有一塊仙石。其石有三丈六尺五寸高，有二丈四尺圍圓。三丈\n",
      "六尺五寸高，按周天三百六十五度﹔二丈四尺圍圓，按政曆二十四氣。上有九竅\n",
      "八孔，按九宮八卦。四面更無樹木遮陰，左右倒有芝蘭相襯。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"蓋自開闢以來，每受天真地秀，日精月華，感之既久，遂有靈通之意。內育仙胞\n",
      "，一日迸裂，產一石卵，似圓毬樣大。因見風，化作一個石猴，五官俱備，四肢\n",
      "皆全。便就學爬學走，拜了四方。目運兩道金光，射沖斗府。驚動高天上聖大慈\n",
      "仁者玉皇大天尊玄穹高上帝，駕座金闕雲宮靈霄寶殿，聚集仙卿，見有金光燄燄\n",
      "，即命千里眼、順風耳開南天門觀看。二將果奉旨出門外，看的真，聽的明。須\n",
      "臾回報道：「臣奉旨觀聽金光之處，乃東勝神洲海東傲來小國之界，有一座花果\n",
      "山，山上有一仙石，石產一卵，見風化一石猴，在那裏拜四方，眼運金光，射沖\n",
      "斗府。如今服餌水食，金光將潛息矣。」玉帝垂賜恩慈曰：「下方之物，乃天地\n",
      "精華所生，不足為異。」\n",
      "\n",
      "那猴在山中，卻會行走跳躍，食草木，飲澗泉，採山花，覓樹果﹔與狼蟲為伴，\n",
      "虎豹為群，獐鹿為友，獼猿為親﹔夜宿石崖之下，朝遊峰洞之中。真是：「山中\n",
      "無甲子，寒盡不知年。」\n",
      "　　一朝天氣炎熱，與群猴避暑，都在松陰之下頑耍。你看他一個個：\n",
      "跳樹攀枝，採花覓果﹔拋彈子，?麼兒﹔跑沙窩，砌寶塔﹔趕蜻蜓，撲蜡﹔參老\n",
      "天，拜菩薩﹔扯葛籐，編草﹔捉虱子，咬又掐﹔理毛衣，剔指甲。挨的挨，擦的\n",
      "擦﹔推的推，壓的壓﹔扯的扯，拉的拉：青松林下任他頑，綠水澗邊隨洗濯。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"一群猴子耍了一會，卻去那山澗中洗澡。見那股澗水奔流，真個似滾瓜湧濺。古\n",
      "云：「禽有禽言，獸有獸語。」眾猴都道：「這股水不知是那裏的水。我們今日\n",
      "趕閑無事，順澗邊往上溜頭尋看源流，耍子去耶！」喊一聲，都拖男挈女，呼弟\n",
      "呼兄，一齊跑來，順澗爬山，直至源流之處，乃是一股瀑布飛泉。但見那：\n",
      "一派白虹起，千尋雪浪飛。\n",
      "　　　　海風吹不斷，江月照還依。\n",
      "　　　　冷氣分青嶂，餘流潤翠微。\n",
      "　　　　潺湲名瀑布，真似掛簾帷。\n",
      "\n",
      "眾猴拍手稱揚道：「好水，好水！原來此處遠通山腳之下，直接大海之波。」又\n",
      "道：「那一個有本事的，鑽進去尋個源頭出來，不傷身體者，我等即拜他為王。」\n",
      "連呼了三聲，忽見叢雜中跳出一個石猴，應聲高叫道：「我進去，我進去。」好\n",
      "猴！也是他：\n",
      "        今日芳名顯，時來大運通。\n",
      "　　　　有緣居此地，王遣入仙宮。\n",
      "\n",
      "你看他瞑目蹲身，將身一縱，徑跳入瀑布泉中，忽睜睛抬頭觀看，那裏邊卻無水\n",
      "無波，明明朗朗的一架橋梁。他住了身，定了神，仔細再看，原來是座鐵板橋。\n",
      "橋下之水，沖貫於石竅之間，倒掛流出去，遮閉了橋門。卻又欠身上橋頭，再走\n",
      "再看，卻似有人家住處一般，真個好所在。但見那：\n",
      "\n",
      "翠蘚堆藍，白雲浮玉，光搖片片煙霞。虛窗靜室，滑凳板生花。乳窟龍珠倚掛，\n",
      "縈迴滿地奇葩。鍋灶傍崖存火跡，樽罍靠案見殽渣。石座石床真可愛，石盆石碗\n",
      "更堪誇。又見那一竿兩竿修竹，三點五點梅花。幾樹青松常帶雨，渾然像個人家。\n",
      "\n",
      "看罷多時，跳過橋中間，左右觀看。只見正當中有一石碣，碣上有一行楷書大字\n",
      "，鐫著「花果山福地，水簾洞洞天」。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"石猿喜不自勝，急抽身往外便走，復瞑目蹲身，跳出水外，打了兩個呵呵道：\n",
      "「大造化！大造化！」眾猴把他圍住，問道：「裏面怎麼樣？水有多深？」石猴\n",
      "道：「沒水！沒水！原來是一座鐵板橋，橋那邊是一座天造地設的家當。」眾猴\n",
      "道：「怎見得是個家當？」石猴笑道：「這股水乃是橋下沖貫石橋，倒掛下來遮\n",
      "閉門戶的。橋邊有花有樹，乃是一座石房。房內有石窩、石灶、石碗、石盆、石\n",
      "床、石凳。中間一塊石碣上，鐫著『花果山福地，水簾洞洞天』。真個是我們安\n",
      "身之處。裏面且是寬闊，容得千百口老小。\n",
      "我們都進去住，也省得受老天之氣。這裏邊：\n",
      "　　　　刮風有處躲，下雨好存身。\n",
      "　　　　霜雪全無懼，雷聲永不聞。\n",
      "　　　　煙霞常照耀，祥瑞每蒸熏。\n",
      "　　　　松竹年年秀，奇花日日新。」\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"眾猴聽得，個個歡喜。都道：「你還先走，帶我們進去，進去。」石猴卻又瞑目\n",
      "蹲身，往裏一跳，叫道：「都隨我進來，進來。」那些猴有膽大的，都跳進去了\n",
      "﹔膽小的，一個個伸頭縮頸，抓耳撓腮，大聲叫喊，纏一會，也都進去了。跳過\n",
      "橋頭，一個個搶盆奪碗，佔灶爭床，搬過來，移過去，正是猴性頑劣，再無一個\n",
      "寧時，只搬得力倦神疲方止。石猿端坐上面道：「列位呵，『人而無信，不知其\n",
      "可。』你們才說有本事進得來，出得去，不傷身體者，就拜他為王。我如今進來\n",
      "又出去，出去又進來，尋了這一個洞天與列位安眠穩睡，各享成家之福，何不拜\n",
      "我為王？」眾猴聽說，即拱伏無違，一個個序齒排班，朝上禮拜，都稱「千歲大\n",
      "王」。自此，石猿高登王位，將「石」字兒隱了，遂稱「美猴王」。有詩為證。\n",
      "詩曰：\n",
      "　　　　三陽交泰產群生，仙石胞含日月精。\n",
      "　　　　借卵化猴完大道，假他名姓配丹成。\n",
      "　　　　內觀不識因無相，外合明知作有形。\n",
      "　　　　歷代人人皆屬此，稱王稱聖任縱橫。\n",
      "\n",
      "美猴王領一群猿猴、獼猴、馬猴等，分派了君臣佐使。朝遊花果山，暮宿水簾洞\n",
      "，合契同情，不入飛鳥之叢，不從走獸之類，獨自為王，不勝歡樂。是以：\n",
      "　　　　春採百花為飲食，夏尋諸果作生涯。\n",
      "　　　　秋收芋栗延時節，冬覓黃精度歲華。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"美猴王享樂天真，何期有三五百載。一日，與群猴喜宴之間，忽然憂惱，墮下淚\n",
      "來。眾猴慌忙羅拜道：「大王何為煩惱？」猴王道：「我雖在歡喜之時，卻有一\n",
      "點兒遠慮，故此煩惱。」眾猴又笑道：「大王好不知足。我等日日歡會，在仙山\n",
      "福地，古洞神洲，不伏麒麟轄，不伏鳳凰管，又不伏人間王位所拘束，自由自在\n",
      "，乃無量之福，為何遠慮而憂也？」猴王道：「今日雖不歸人王法律，不懼禽獸\n",
      "威嚴，將來年老血衰，暗中有閻王老子管著，一旦身亡，可不枉生世界之中，不\n",
      "得久注天人之內？」眾猴聞此言，一個個掩面悲啼，俱以無常為慮。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"只見那班部中，忽跳出一個通背猿猴，厲聲高叫道：「大王若是這般遠慮，真所\n",
      "謂道心開發也。如今五蟲之內，惟有三等名色不伏閻王老子所管。」猴王道：\n",
      "「你知那三等人？」猿猴道：「乃是佛與仙與神聖三者，躲過輪迴，不生不滅，\n",
      "與天地山川齊壽。」猴王道：「此三者居於何所？」猿猴道：「他只在閻浮世界\n",
      "之中，古洞仙山之內。」猴王聞之，滿心歡喜道：「我明日就辭汝等下山，雲遊\n",
      "海角，遠涉天涯，務必訪此三者，學一個不老長生，常躲過閻君之難。」噫！這\n",
      "句話，頓教跳出輪迴網，致使齊天大聖成。眾猴鼓掌稱揚，都道：「善哉，善哉\n",
      "！我等明日越嶺登山，廣尋些果品，大設筵宴送大王也。」\n",
      "　　次日，眾猴果去採仙桃，摘異果，刨山藥，斸黃精。芝蘭香蕙，瑤草奇花，\n",
      "般般件件，整整齊齊，擺開石凳石桌，排列仙酒仙殽。但見那：\n",
      "金丸珠彈，紅綻黃肥。金丸珠彈臘櫻桃，色真甘美﹔紅綻黃肥熟梅子，味果香酸\n",
      "。鮮龍眼，肉甜皮薄﹔火荔枝，核小囊紅。林檎碧實連枝獻，枇杷緗苞帶葉擎。\n",
      "兔頭梨子雞心棗，消渴除煩更解酲。香桃爛杏，美甘甘似玉液瓊漿﹔脆李楊梅，\n",
      "酸蔭蔭如脂酥膏酪。紅囊黑子熟西瓜，四瓣黃皮大柿子。石榴裂破，丹砂粒現火\n",
      "晶珠﹔芋栗剖開，堅硬肉團金瑪瑙。胡桃銀杏可傳茶，椰子葡萄能做酒。榛松榧\n",
      "柰滿盤盛，橘蔗柑橙盈案擺。熟煨山藥，爛煮黃精。搗碎茯苓並薏苡，石鍋微火\n",
      "漫炊羹。人間縱有珍饈味，怎比山猴樂更寧。\n",
      "\n",
      "群猴尊美猴王上坐，各依齒肩排於下邊，一個個輪流上前奉酒、奉花、奉果，痛\n",
      "飲了一日。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"群猴尊美猴王上坐，各依齒肩排於下邊，一個個輪流上前奉酒、奉花、奉果，痛\n",
      "飲了一日。\n",
      "\n",
      "次日，美猴王早起，教：「小的們，替我折些枯松，編作?子，取個竹竿作篙，\n",
      "收拾些果品之類，我將去也。」果獨自登?，儘力撐開，飄飄蕩蕩，徑向大海波\n",
      "中，趁天風，來渡南贍部洲地界。這一去，正是那：\n",
      "        天產仙猴道行隆，離山駕?趁天風。\n",
      "　　　　飄洋過海尋仙道，立志潛心建大功。\n",
      "　　　　有分有緣休俗願，無憂無慮會元龍。\n",
      "　　　　料應必遇知音者，說破源流萬法通。\n",
      "\n",
      "也是他運至時來，自登木?之後，連日東南風緊，將他送到西北岸前，乃是南贍\n",
      "部洲地界。持篙試水，偶得淺水，棄了?子，跳上岸來。只見海邊有人捕魚、打\n",
      "雁、穵蛤、淘鹽。他走近前，弄個把戲，妝個虎，嚇得那些人丟筐棄網，四散奔\n",
      "跑。將那跑不動的拿住一個，剝了他衣裳，也學人穿在身上。搖搖擺擺，穿州過\n",
      "府，在市廛中學人禮，學人話。朝餐夜宿，一心裏訪問佛、仙、神聖之道，覓個\n",
      "長生不老之方。見世人都是為名為利之徒，更無一個為身命者。正是那：\n",
      "　　　　爭名奪利幾時休？早起遲眠不自由！\n",
      "　　　　騎著驢騾思駿馬，官居宰相望王侯。\n",
      "　　　　只愁衣食耽勞碌，何怕閻君就取勾。\n",
      "　　　　繼子蔭孫圖富貴，更無一個肯回頭。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"猴王參訪仙道，無緣得遇。在於南贍部洲，串長城，遊小縣，不覺八九年餘。忽\n",
      "行至西洋大海，他想著海外必有神仙。獨自個依前作?，又飄過西海，直至西牛\n",
      "賀洲地界。登岸遍訪多時，忽見一座高山秀麗，林麓幽深。他也不怕狼蟲，不懼\n",
      "虎豹，登上山頂上觀看。果是好山：\n",
      "千峰排戟，萬仞開屏。日映嵐光輕鎖翠，雨收黛色冷含青。瘦籐纏老樹，古渡界\n",
      "幽程。奇花瑞草，修竹喬松。修竹喬松，萬載常青欺福地﹔奇花瑞草，四時不謝\n",
      "賽蓬瀛。幽鳥啼聲近，源泉響溜清。重重谷壑芝蘭繞，處處巉崖苔蘚生。起伏巒\n",
      "頭龍脈好，必有高人隱姓名。\n",
      "\n",
      "正觀看間，忽聞得林深之處有人言語。急忙趨步，穿入林中，側耳而聽，原來是\n",
      "歌唱之聲。歌曰：\n",
      "「觀棋柯爛，伐木丁丁，雲邊谷口徐行。賣薪沽酒，狂笑自陶情。蒼逕秋高，對\n",
      "月枕松根，一覺天明。認舊林，登崖過嶺，持斧斷枯籐。收來成一擔，行歌市上\n",
      "，易米三升。更無些子爭競，時價平平。不會機謀巧算，沒榮辱，恬淡延生。相\n",
      "逢處，非仙即道，靜坐講黃庭。」\n",
      "\n",
      "美猴王聽得此言，滿心歡喜道：「神仙原來藏在這裏！」即忙跳入裏面，仔細再\n",
      "看，乃是一個樵子，在那裏舉斧砍柴。但看他打扮非常：\n",
      "\n",
      "頭上戴箬笠，乃是新筍初脫之籜。身上穿布衣，乃是木綿撚就之紗。腰間繫環絛\n",
      "，乃是老蠶口吐之絲。足下踏草履，乃是枯莎槎就之爽。手執?鋼斧，擔挽火麻\n",
      "繩。扳松劈枯樹，爭似此樵能。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"猴王近前叫道：「老神仙，弟子起手。」那樵漢慌忙丟了斧，轉身答禮道：「不\n",
      "當人，不當人。我拙漢衣食不全，怎敢當『神仙』二字？」猴王道：「你不是神\n",
      "仙，如何說出神仙的話來？」樵夫道：「我說甚麼神仙話？」猴王道：「我才來\n",
      "至林邊，只聽的你說：『相逢處，非仙即道，靜坐講《黃庭》。』《黃庭》乃道\n",
      "德真言，非神仙而何？」樵夫笑道：「實不瞞你說，這個詞名做《滿庭芳》，乃\n",
      "一神仙教我的。那神仙與我舍下相鄰，他見我家事勞苦，日常煩惱，教我遇煩惱\n",
      "時，即把這詞兒念念，一則散心，二則解困。我才有些不足處思慮，故此念念，\n",
      "不期被你聽了。」猴王道：「你家既與神仙相鄰，何不從他修行？學得個不老之\n",
      "方，卻不是好？」樵夫道：「我一生命苦：自幼蒙父母養育至八九歲，才知人事\n",
      "，不幸父喪，母親居孀。再無兄弟姊妹，只我一人，沒奈何，早晚侍奉。如今母\n",
      "老，一發不敢拋離。卻又田園荒蕪，衣食不足，只得斫兩束柴薪，挑向市廛之間\n",
      "，貨幾文錢，糴幾升米，自炊自造，安排些茶飯，供養老母。所以不能修行。」\n",
      "\n",
      "猴王道：「據你說起來，乃是一個行孝的君子，向後必有好處。但望你指與我那\n",
      "神仙住處，卻好拜訪去也。」樵夫道：「不遠，不遠。此山叫做靈臺方寸山，山\n",
      "中有座斜月三星洞，那洞中有一個神仙，稱名須菩提祖師。那祖師出去的徒弟，\n",
      "也不計其數，見今還有三四十人從他修行。你順那條小路兒，向南行七八里遠近\n",
      "，即是他家了。」猴王用手扯住樵夫道：「老兄，你便同我去去，若還得了好處\n",
      "，決不忘你指引之恩。」樵夫道：「你這漢子甚不通變，我方才這般與你說了，\n",
      "你還不省？假若我與你去了，卻不誤了我的生意？老母何人奉養？我要斫柴，你\n",
      "自去，自去。」\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"猴王聽說，只得相辭。出深林，找上路徑，過一山坡，約有七八里遠，果然望見\n",
      "一座洞府。挺身觀看，真好去處！但見：\n",
      "煙霞散彩，日月搖光。千株老柏，萬節修篁。千株老柏，帶雨半空青冉冉﹔萬節\n",
      "修篁，含煙一壑色蒼蒼。門外奇花佈錦，橋邊瑤草噴香。石崖突兀青苔潤，懸壁\n",
      "高張翠蘚長。時聞仙鶴唳，每見鳳凰翔。仙鶴唳時，聲振九皋霄漢遠﹔鳳凰翔起\n",
      "，翎毛五色彩雲光。玄猿白鹿隨隱見，金獅玉象任行藏。細觀靈福地，真個賽天\n",
      "堂。\n",
      "\n",
      "又見那洞門緊閉，靜悄悄杳無人跡。忽回頭，見崖頭立一石碑，約有三丈餘高，\n",
      "八尺餘闊，上有一行十個大字，乃是「靈臺方寸山，斜月三星洞」。美猴王十分\n",
      "歡喜道：「此間人果是樸實，果有此山此洞。」看勾多時，不敢敲門。且去跳上\n",
      "松枝梢頭，摘松子吃了頑耍。\n",
      "\n",
      "少頃間，只聽得呀的一聲，洞門開處，裏面走出一個仙童，真個丰姿英偉，像貌\n",
      "清奇，比尋常俗子不同。但見他：\n",
      "　　　　髽髻雙絲綰，寬袍兩袖風。\n",
      "　　　　貌和身自別，心與相俱空。\n",
      "　　　　物外長年客，山中永壽童。\n",
      "　　　　一塵全不染，甲子任翻騰。\n",
      "\n",
      "那童子出得門來，高叫道：「甚麼人在此搔擾？」猴王撲的跳下樹來，上前躬身\n",
      "道：「仙童，我是個訪道學仙之弟子，更不敢在此搔擾。」仙童笑道：「你是個\n",
      "訪道的麼？」猴王道：「是。」童子道：「我家師父正才下榻，登壇講道，還未\n",
      "說出原由，就教我出來開門。說：『外面有個修行的來了，可去接待接待。』想\n",
      "必就是你了？」猴王笑道：「是我，是我。」童子道：「你跟我進來。」\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"這猴王整衣端肅，隨童子徑入洞天深處觀看：一層層深閣瓊樓，一進進珠宮貝闕\n",
      "，說不盡那靜室幽居。直至瑤臺之下，見那菩提祖師端坐在臺上，兩邊有三十個\n",
      "小仙侍立臺下。果然是：\n",
      "大覺金仙沒垢姿，西方妙相祖菩提。不生不滅三三行，全氣全神萬萬慈。空寂自\n",
      "然隨變化，真如本性任為之。與天同壽莊嚴體，歷劫明心大法師。\n",
      "\n",
      "美猴王一見，倒身下拜，磕頭不計其數，口中只道：「師父，師父，我弟子志心\n",
      "朝禮，志心朝禮。」祖師道：「你是那方人氏？且說個鄉貫、姓名明白，再拜。」\n",
      "猴王道：「弟子乃東勝神洲傲來國花果山水簾洞人氏。」祖師喝令：「趕出去！\n",
      "他本是個撒詐搗虛之徒，那裏修甚麼道果！」猴王慌忙磕頭不住道：「弟子是老\n",
      "實之言，決無虛詐。」祖師道：「你既老實，怎麼說東勝神洲？那去處到我這裏\n",
      "隔兩重大海，一座南贍部洲，如何就得到此？」猴王叩頭道：「弟子飄洋過海，\n",
      "登界遊方，有十數個年頭，方才訪到此處。」\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n",
      "Prompt after formatting:\n",
      "\u001b[32;1m\u001b[1;3mWrite a concise summary of the following:\n",
      "\n",
      "\n",
      "\"祖師道：「既是逐漸行來的也罷。你姓甚麼？」猴王又道：「我無性。人若罵我\n",
      "，我也不惱﹔若打我，我也不嗔。只是陪個禮兒就罷了。一生無性。」祖師道：\n",
      "「不是這個性。你父母原來姓甚麼？」猴王道：「我也無父母。」祖師道：「既\n",
      "無父母，想是樹上生的？」猴王道：「我雖不是樹上生，卻是石裏長的。我只記\n",
      "得花果山上有一塊仙石，其年石破，我便生也。」祖師聞言暗喜，道：「這等說\n",
      "，卻是個天地生成的。你起來走走我看。」猴王縱身跳起，拐呀拐的走了兩遍。\n",
      "祖師笑道：「你身軀雖是鄙陋，卻像個食松果的猢猻。我與你就身上取個姓氏，\n",
      "意思教你姓『猢』。猢字去了個獸傍，乃是個古月。古者，老也﹔月者，陰也。\n",
      "老陰不能化育，教你姓『猻』倒好。猻字去了獸傍，乃是個子系。子者，兒男也﹔\n",
      "系者。嬰細也，正合嬰兒之本論。教你姓『孫』罷。」猴王聽說，滿心歡喜，朝\n",
      "上叩頭道：「好！好！好！今日方知姓也。萬望師父慈悲，既然有姓，再乞賜個\n",
      "名字，卻好呼喚。」祖師道：「我門中有十二個字，分派起名，到你乃第十輩之\n",
      "小徒矣。」猴王道：「那十二個字？」祖師道：「乃廣、大、智、慧、真、如、\n",
      "性、海、穎、悟、圓、覺十二字。排到你，正當『悟』字。與你起個法名叫做\n",
      "『孫悟空』，好麼？」猴王笑道：「好！好！好！自今就叫做孫悟空也。」正是：\n",
      "    鴻濛初闢原無姓，打破頑空須悟空。\n",
      "　　畢竟不知向後修些甚麼道果，且聽下回分解。\"\n",
      "\n",
      "\n",
      "CONCISE SUMMARY:\u001b[0m\n"
     ]
    },
    {
     "ename": "TypeError",
     "evalue": "'NoneType' object is not iterable",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
      "Cell \u001b[1;32mIn[28], line 16\u001b[0m\n\u001b[0;32m     14\u001b[0m \u001b[38;5;66;03m# There is a lot of complexity hidden in this one line. I encourage you to check out the video above for more detail\u001b[39;00m\n\u001b[0;32m     15\u001b[0m chain \u001b[38;5;241m=\u001b[39m load_summarize_chain(llm, chain_type\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmap_reduce\u001b[39m\u001b[38;5;124m\"\u001b[39m, verbose\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[1;32m---> 16\u001b[0m \u001b[43mchain\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtexts\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\_api\\deprecation.py:180\u001b[0m, in \u001b[0;36mdeprecated.<locals>.deprecate.<locals>.warning_emitting_wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m    178\u001b[0m     warned \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m    179\u001b[0m     emit_warning()\n\u001b[1;32m--> 180\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mwrapped\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\base.py:597\u001b[0m, in \u001b[0;36mChain.run\u001b[1;34m(self, callbacks, tags, metadata, *args, **kwargs)\u001b[0m\n\u001b[0;32m    595\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(args) \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m1\u001b[39m:\n\u001b[0;32m    596\u001b[0m         \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m`run` supports only one positional argument.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m--> 597\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43margs\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallbacks\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtags\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtags\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmetadata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmetadata\u001b[49m\u001b[43m)\u001b[49m[\n\u001b[0;32m    598\u001b[0m         _output_key\n\u001b[0;32m    599\u001b[0m     ]\n\u001b[0;32m    601\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m kwargs \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m args:\n\u001b[0;32m    602\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m(kwargs, callbacks\u001b[38;5;241m=\u001b[39mcallbacks, tags\u001b[38;5;241m=\u001b[39mtags, metadata\u001b[38;5;241m=\u001b[39mmetadata)[\n\u001b[0;32m    603\u001b[0m         _output_key\n\u001b[0;32m    604\u001b[0m     ]\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\_api\\deprecation.py:180\u001b[0m, in \u001b[0;36mdeprecated.<locals>.deprecate.<locals>.warning_emitting_wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m    178\u001b[0m     warned \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mTrue\u001b[39;00m\n\u001b[0;32m    179\u001b[0m     emit_warning()\n\u001b[1;32m--> 180\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mwrapped\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\base.py:380\u001b[0m, in \u001b[0;36mChain.__call__\u001b[1;34m(self, inputs, return_only_outputs, callbacks, tags, metadata, run_name, include_run_info)\u001b[0m\n\u001b[0;32m    348\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Execute the chain.\u001b[39;00m\n\u001b[0;32m    349\u001b[0m \n\u001b[0;32m    350\u001b[0m \u001b[38;5;124;03mArgs:\u001b[39;00m\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    371\u001b[0m \u001b[38;5;124;03m        `Chain.output_keys`.\u001b[39;00m\n\u001b[0;32m    372\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m    373\u001b[0m config \u001b[38;5;241m=\u001b[39m {\n\u001b[0;32m    374\u001b[0m     \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcallbacks\u001b[39m\u001b[38;5;124m\"\u001b[39m: callbacks,\n\u001b[0;32m    375\u001b[0m     \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtags\u001b[39m\u001b[38;5;124m\"\u001b[39m: tags,\n\u001b[0;32m    376\u001b[0m     \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmetadata\u001b[39m\u001b[38;5;124m\"\u001b[39m: metadata,\n\u001b[0;32m    377\u001b[0m     \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrun_name\u001b[39m\u001b[38;5;124m\"\u001b[39m: run_name,\n\u001b[0;32m    378\u001b[0m }\n\u001b[1;32m--> 380\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43minvoke\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    381\u001b[0m \u001b[43m    \u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    382\u001b[0m \u001b[43m    \u001b[49m\u001b[43mcast\u001b[49m\u001b[43m(\u001b[49m\u001b[43mRunnableConfig\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[43mk\u001b[49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43mv\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m 
\u001b[49m\u001b[43mk\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mv\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mconfig\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mitems\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mv\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mnot\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m}\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    383\u001b[0m \u001b[43m    \u001b[49m\u001b[43mreturn_only_outputs\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mreturn_only_outputs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    384\u001b[0m \u001b[43m    \u001b[49m\u001b[43minclude_run_info\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43minclude_run_info\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    385\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\base.py:163\u001b[0m, in \u001b[0;36mChain.invoke\u001b[1;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[0;32m    161\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m    162\u001b[0m     run_manager\u001b[38;5;241m.\u001b[39mon_chain_error(e)\n\u001b[1;32m--> 163\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m    164\u001b[0m run_manager\u001b[38;5;241m.\u001b[39mon_chain_end(outputs)\n\u001b[0;32m    166\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m include_run_info:\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\base.py:153\u001b[0m, in \u001b[0;36mChain.invoke\u001b[1;34m(self, input, config, **kwargs)\u001b[0m\n\u001b[0;32m    150\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m    151\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_validate_inputs(inputs)\n\u001b[0;32m    152\u001b[0m     outputs \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m--> 153\u001b[0m         \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call\u001b[49m\u001b[43m(\u001b[49m\u001b[43minputs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_manager\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    154\u001b[0m         \u001b[38;5;28;01mif\u001b[39;00m new_arg_supported\n\u001b[0;32m    155\u001b[0m         \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call(inputs)\n\u001b[0;32m    156\u001b[0m     )\n\u001b[0;32m    158\u001b[0m     final_outputs: Dict[\u001b[38;5;28mstr\u001b[39m, Any] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mprep_outputs(\n\u001b[0;32m    159\u001b[0m         inputs, outputs, return_only_outputs\n\u001b[0;32m    160\u001b[0m     )\n\u001b[0;32m    161\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\combine_documents\\base.py:138\u001b[0m, in \u001b[0;36mBaseCombineDocumentsChain._call\u001b[1;34m(self, inputs, run_manager)\u001b[0m\n\u001b[0;32m    136\u001b[0m \u001b[38;5;66;03m# Other keys are assumed to be needed for LLM prediction\u001b[39;00m\n\u001b[0;32m    137\u001b[0m other_keys \u001b[38;5;241m=\u001b[39m {k: v \u001b[38;5;28;01mfor\u001b[39;00m k, v \u001b[38;5;129;01min\u001b[39;00m inputs\u001b[38;5;241m.\u001b[39mitems() \u001b[38;5;28;01mif\u001b[39;00m k \u001b[38;5;241m!=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39minput_key}\n\u001b[1;32m--> 138\u001b[0m output, extra_return_dict \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcombine_docs\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    139\u001b[0m \u001b[43m    \u001b[49m\u001b[43mdocs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m_run_manager\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_child\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mother_keys\u001b[49m\n\u001b[0;32m    140\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    141\u001b[0m extra_return_dict[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39moutput_key] \u001b[38;5;241m=\u001b[39m output\n\u001b[0;32m    142\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m extra_return_dict\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\combine_documents\\map_reduce.py:226\u001b[0m, in \u001b[0;36mMapReduceDocumentsChain.combine_docs\u001b[1;34m(self, docs, token_max, callbacks, **kwargs)\u001b[0m\n\u001b[0;32m    214\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mcombine_docs\u001b[39m(\n\u001b[0;32m    215\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    216\u001b[0m     docs: List[Document],\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    219\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    220\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Tuple[\u001b[38;5;28mstr\u001b[39m, \u001b[38;5;28mdict\u001b[39m]:\n\u001b[0;32m    221\u001b[0m \u001b[38;5;250m    \u001b[39m\u001b[38;5;124;03m\"\"\"Combine documents in a map reduce manner.\u001b[39;00m\n\u001b[0;32m    222\u001b[0m \n\u001b[0;32m    223\u001b[0m \u001b[38;5;124;03m    Combine by mapping first chain over all documents, then reducing the results.\u001b[39;00m\n\u001b[0;32m    224\u001b[0m \u001b[38;5;124;03m    This reducing can be done recursively if needed (if there are many documents).\u001b[39;00m\n\u001b[0;32m    225\u001b[0m \u001b[38;5;124;03m    \"\"\"\u001b[39;00m\n\u001b[1;32m--> 226\u001b[0m     map_results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mllm_chain\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapply\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    227\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;66;43;03m# FYI - this is parallelized and so it is fast.\u001b[39;49;00m\n\u001b[0;32m    228\u001b[0m \u001b[43m        \u001b[49m\u001b[43m[\u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdocument_variable_name\u001b[49m\u001b[43m:\u001b[49m\u001b[43m 
\u001b[49m\u001b[43md\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpage_content\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m}\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mfor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43md\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01min\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mdocs\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    229\u001b[0m \u001b[43m        \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallbacks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    230\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    231\u001b[0m     question_result_key \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mllm_chain\u001b[38;5;241m.\u001b[39moutput_key\n\u001b[0;32m    232\u001b[0m     result_docs \u001b[38;5;241m=\u001b[39m [\n\u001b[0;32m    233\u001b[0m         Document(page_content\u001b[38;5;241m=\u001b[39mr[question_result_key], metadata\u001b[38;5;241m=\u001b[39mdocs[i]\u001b[38;5;241m.\u001b[39mmetadata)\n\u001b[0;32m    234\u001b[0m         \u001b[38;5;66;03m# This uses metadata from the docs, and the textual results from `results`\u001b[39;00m\n\u001b[0;32m    235\u001b[0m         \u001b[38;5;28;01mfor\u001b[39;00m i, r \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28menumerate\u001b[39m(map_results)\n\u001b[0;32m    236\u001b[0m     ]\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\llm.py:249\u001b[0m, in \u001b[0;36mLLMChain.apply\u001b[1;34m(self, input_list, callbacks)\u001b[0m\n\u001b[0;32m    247\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m    248\u001b[0m     run_manager\u001b[38;5;241m.\u001b[39mon_chain_error(e)\n\u001b[1;32m--> 249\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m    250\u001b[0m outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcreate_outputs(response)\n\u001b[0;32m    251\u001b[0m run_manager\u001b[38;5;241m.\u001b[39mon_chain_end({\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124moutputs\u001b[39m\u001b[38;5;124m\"\u001b[39m: outputs})\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\llm.py:246\u001b[0m, in \u001b[0;36mLLMChain.apply\u001b[1;34m(self, input_list, callbacks)\u001b[0m\n\u001b[0;32m    241\u001b[0m run_manager \u001b[38;5;241m=\u001b[39m callback_manager\u001b[38;5;241m.\u001b[39mon_chain_start(\n\u001b[0;32m    242\u001b[0m     \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m    243\u001b[0m     {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124minput_list\u001b[39m\u001b[38;5;124m\"\u001b[39m: input_list},\n\u001b[0;32m    244\u001b[0m )\n\u001b[0;32m    245\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m--> 246\u001b[0m     response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\u001b[43minput_list\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_manager\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    247\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m    248\u001b[0m     run_manager\u001b[38;5;241m.\u001b[39mon_chain_error(e)\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain\\chains\\llm.py:137\u001b[0m, in \u001b[0;36mLLMChain.generate\u001b[1;34m(self, input_list, run_manager)\u001b[0m\n\u001b[0;32m    135\u001b[0m callbacks \u001b[38;5;241m=\u001b[39m run_manager\u001b[38;5;241m.\u001b[39mget_child() \u001b[38;5;28;01mif\u001b[39;00m run_manager \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m    136\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mllm, BaseLanguageModel):\n\u001b[1;32m--> 137\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mllm\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate_prompt\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    138\u001b[0m \u001b[43m        \u001b[49m\u001b[43mprompts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    139\u001b[0m \u001b[43m        \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    140\u001b[0m \u001b[43m        \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallbacks\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    141\u001b[0m \u001b[43m        \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mllm_kwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    142\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    143\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m    144\u001b[0m     results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mllm\u001b[38;5;241m.\u001b[39mbind(stop\u001b[38;5;241m=\u001b[39mstop, 
\u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mllm_kwargs)\u001b[38;5;241m.\u001b[39mbatch(\n\u001b[0;32m    145\u001b[0m         cast(List, prompts), {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcallbacks\u001b[39m\u001b[38;5;124m\"\u001b[39m: callbacks}\n\u001b[0;32m    146\u001b[0m     )\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:750\u001b[0m, in \u001b[0;36mBaseLLM.generate_prompt\u001b[1;34m(self, prompts, stop, callbacks, **kwargs)\u001b[0m\n\u001b[0;32m    742\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mgenerate_prompt\u001b[39m(\n\u001b[0;32m    743\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    744\u001b[0m     prompts: List[PromptValue],\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    747\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    748\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m LLMResult:\n\u001b[0;32m    749\u001b[0m     prompt_strings \u001b[38;5;241m=\u001b[39m [p\u001b[38;5;241m.\u001b[39mto_string() \u001b[38;5;28;01mfor\u001b[39;00m p \u001b[38;5;129;01min\u001b[39;00m prompts]\n\u001b[1;32m--> 750\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgenerate\u001b[49m\u001b[43m(\u001b[49m\u001b[43mprompt_strings\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcallbacks\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mcallbacks\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:944\u001b[0m, in \u001b[0;36mBaseLLM.generate\u001b[1;34m(self, prompts, stop, callbacks, tags, metadata, run_name, run_id, **kwargs)\u001b[0m\n\u001b[0;32m    929\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcache \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m get_llm_cache() \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m) \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcache \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mFalse\u001b[39;00m:\n\u001b[0;32m    930\u001b[0m     run_managers \u001b[38;5;241m=\u001b[39m [\n\u001b[0;32m    931\u001b[0m         callback_manager\u001b[38;5;241m.\u001b[39mon_llm_start(\n\u001b[0;32m    932\u001b[0m             dumpd(\u001b[38;5;28mself\u001b[39m),\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    942\u001b[0m         )\n\u001b[0;32m    943\u001b[0m     ]\n\u001b[1;32m--> 944\u001b[0m     output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_generate_helper\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    945\u001b[0m \u001b[43m        \u001b[49m\u001b[43mprompts\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_managers\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mbool\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mnew_arg_supported\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\n\u001b[0;32m    946\u001b[0m \u001b[43m    \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    947\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m 
output\n\u001b[0;32m    948\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(missing_prompts) \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m:\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:787\u001b[0m, in \u001b[0;36mBaseLLM._generate_helper\u001b[1;34m(self, prompts, stop, run_managers, new_arg_supported, **kwargs)\u001b[0m\n\u001b[0;32m    785\u001b[0m     \u001b[38;5;28;01mfor\u001b[39;00m run_manager \u001b[38;5;129;01min\u001b[39;00m run_managers:\n\u001b[0;32m    786\u001b[0m         run_manager\u001b[38;5;241m.\u001b[39mon_llm_error(e, response\u001b[38;5;241m=\u001b[39mLLMResult(generations\u001b[38;5;241m=\u001b[39m[]))\n\u001b[1;32m--> 787\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m    788\u001b[0m flattened_outputs \u001b[38;5;241m=\u001b[39m output\u001b[38;5;241m.\u001b[39mflatten()\n\u001b[0;32m    789\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m manager, flattened_output \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mzip\u001b[39m(run_managers, flattened_outputs):\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_core\\language_models\\llms.py:774\u001b[0m, in \u001b[0;36mBaseLLM._generate_helper\u001b[1;34m(self, prompts, stop, run_managers, new_arg_supported, **kwargs)\u001b[0m\n\u001b[0;32m    764\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_generate_helper\u001b[39m(\n\u001b[0;32m    765\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m    766\u001b[0m     prompts: List[\u001b[38;5;28mstr\u001b[39m],\n\u001b[1;32m   (...)\u001b[0m\n\u001b[0;32m    770\u001b[0m     \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs: Any,\n\u001b[0;32m    771\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m LLMResult:\n\u001b[0;32m    772\u001b[0m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m    773\u001b[0m         output \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m--> 774\u001b[0m             \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_generate\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m    775\u001b[0m \u001b[43m                \u001b[49m\u001b[43mprompts\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    776\u001b[0m \u001b[43m                \u001b[49m\u001b[43mstop\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstop\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    777\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;66;43;03m# TODO: support multiple run managers\u001b[39;49;00m\n\u001b[0;32m    778\u001b[0m \u001b[43m                \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_managers\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;241;43m0\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mrun_managers\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m,\u001b[49m\n\u001b[0;32m    
779\u001b[0m \u001b[43m                \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m    780\u001b[0m \u001b[43m            \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    781\u001b[0m             \u001b[38;5;28;01mif\u001b[39;00m new_arg_supported\n\u001b[0;32m    782\u001b[0m             \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_generate(prompts, stop\u001b[38;5;241m=\u001b[39mstop)\n\u001b[0;32m    783\u001b[0m         )\n\u001b[0;32m    784\u001b[0m     \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mBaseException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m    785\u001b[0m         \u001b[38;5;28;01mfor\u001b[39;00m run_manager \u001b[38;5;129;01min\u001b[39;00m run_managers:\n",
      "File \u001b[1;32mD:\\CacheData\\anaconda\\envs\\hanlp-python38\\lib\\site-packages\\langchain_community\\llms\\openai.py:471\u001b[0m, in \u001b[0;36mBaseOpenAI._generate\u001b[1;34m(self, prompts, stop, run_manager, **kwargs)\u001b[0m\n\u001b[0;32m    466\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(response, \u001b[38;5;28mdict\u001b[39m):\n\u001b[0;32m    467\u001b[0m     \u001b[38;5;66;03m# V1 client returns the response in an PyDantic object instead of\u001b[39;00m\n\u001b[0;32m    468\u001b[0m     \u001b[38;5;66;03m# dict. For the transition period, we deep convert it to dict.\u001b[39;00m\n\u001b[0;32m    469\u001b[0m     response \u001b[38;5;241m=\u001b[39m response\u001b[38;5;241m.\u001b[39mdict()\n\u001b[1;32m--> 471\u001b[0m \u001b[43mchoices\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mextend\u001b[49m\u001b[43m(\u001b[49m\u001b[43mresponse\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mchoices\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m    472\u001b[0m update_token_usage(_keys, response, token_usage)\n\u001b[0;32m    473\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m system_fingerprint:\n",
      "\u001b[1;31mTypeError\u001b[0m: 'NoneType' object is not iterable"
     ]
    }
   ],
   "source": [
    "# Summarize a long text with a map-reduce summarization chain.\n",
    "from langchain.chains.summarize import load_summarize_chain\n",
    "from langchain.document_loaders import TextLoader\n",
    "from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
    "\n",
    "# Load the source text (UTF-8) as LangChain Document objects.\n",
    "documents = TextLoader('./data/xiyouji-part1.txt', encoding='utf-8').load()\n",
    "\n",
    "# Chunk the text so each piece fits in the model's context window; the\n",
    "# 50-character overlap keeps sentences from being cut at chunk boundaries.\n",
    "splitter = RecursiveCharacterTextSplitter(chunk_size=700, chunk_overlap=50)\n",
    "chunks = splitter.split_documents(documents)\n",
    "\n",
    "# map_reduce: summarize each chunk independently (map), then combine the\n",
    "# partial summaries into one final summary (reduce).\n",
    "# NOTE(review): relies on `llm` being defined in an earlier cell.\n",
    "chain = load_summarize_chain(llm, chain_type=\"map_reduce\", verbose=True)\n",
    "chain.run(chunks)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "8b2da775-0d0c-4001-9c80-c8b8d225892a",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3.8",
   "language": "python",
   "name": "py38"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.20"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
