{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "78d96465-ef77-4b89-be2c-e88cd48f2489",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:37:05.938445Z",
     "iopub.status.busy": "2024-01-30T06:37:05.938136Z",
     "iopub.status.idle": "2024-01-30T06:37:26.554435Z",
     "shell.execute_reply": "2024-01-30T06:37:26.553906Z",
     "shell.execute_reply.started": "2024-01-30T06:37:05.938419Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: https://mirrors.aliyun.com/pypi/simple\n",
      "Collecting langchain\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/23/9f/a78357793c96ae5b53b5a31a891ed2fe3b02dc1a11a705dd14da67932c42/langchain-0.1.4-py3-none-any.whl (803 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m803.6/803.6 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: PyYAML>=5.3 in /opt/conda/lib/python3.10/site-packages (from langchain) (6.0.1)\n",
      "Collecting SQLAlchemy<3,>=1.4 (from langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/2c/e6/967cd898cbce485c385d4cd644195f906b2571f9393dc1537019a821a8a6/SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (3.1 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: aiohttp<4.0.0,>=3.8.3 in /opt/conda/lib/python3.10/site-packages (from langchain) (3.9.1)\n",
      "Requirement already satisfied: async-timeout<5.0.0,>=4.0.0 in /opt/conda/lib/python3.10/site-packages (from langchain) (4.0.3)\n",
      "Collecting dataclasses-json<0.7,>=0.5.7 (from langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/ae/53/8c006de775834cd4ea64a445402dc195caeebb77dc76b7defb9b3887cb0d/dataclasses_json-0.6.3-py3-none-any.whl (28 kB)\n",
      "Collecting jsonpatch<2.0,>=1.33 (from langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl (12 kB)\n",
      "Collecting langchain-community<0.1,>=0.0.14 (from langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/57/00/a798f8124db57eb9e20fe31dc7561e15e9c4607281cddaa4db49f93d7111/langchain_community-0.0.16-py3-none-any.whl (1.6 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.6/1.6 MB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hCollecting langchain-core<0.2,>=0.1.16 (from langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/e6/7f/2c5006e2292bbcc9fc7cfaac407954c00fc8c9f5afd4e62c17adc0ba1790/langchain_core-0.1.17-py3-none-any.whl (235 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m235.9/235.9 kB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hCollecting langsmith<0.1,>=0.0.83 (from langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/61/f2/45e032811f39cfe79d49935d653f531dbe17c60b30639d4bb6f0cf09d26d/langsmith-0.0.84-py3-none-any.whl (53 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m54.0/54.0 kB\u001b[0m \u001b[31m2.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: numpy<2,>=1 in /opt/conda/lib/python3.10/site-packages (from langchain) (1.26.3)\n",
      "Requirement already satisfied: pydantic<3,>=1 in /opt/conda/lib/python3.10/site-packages (from langchain) (2.5.3)\n",
      "Requirement already satisfied: requests<3,>=2 in /opt/conda/lib/python3.10/site-packages (from langchain) (2.31.0)\n",
      "Requirement already satisfied: tenacity<9.0.0,>=8.1.0 in /opt/conda/lib/python3.10/site-packages (from langchain) (8.2.3)\n",
      "Requirement already satisfied: attrs>=17.3.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (23.2.0)\n",
      "Requirement already satisfied: multidict<7.0,>=4.5 in /opt/conda/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (6.0.4)\n",
      "Requirement already satisfied: yarl<2.0,>=1.0 in /opt/conda/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.9.4)\n",
      "Requirement already satisfied: frozenlist>=1.1.1 in /opt/conda/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.4.1)\n",
      "Requirement already satisfied: aiosignal>=1.1.2 in /opt/conda/lib/python3.10/site-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.3.1)\n",
      "Collecting marshmallow<4.0.0,>=3.18.0 (from dataclasses-json<0.7,>=0.5.7->langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/57/e9/4368d49d3b462da16a3bac976487764a84dd85cef97232c7bd61f5bdedf3/marshmallow-3.20.2-py3-none-any.whl (49 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m49.4/49.4 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting typing-inspect<1,>=0.4.0 (from dataclasses-json<0.7,>=0.5.7->langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl (8.8 kB)\n",
      "Requirement already satisfied: jsonpointer>=1.9 in /opt/conda/lib/python3.10/site-packages (from jsonpatch<2.0,>=1.33->langchain) (2.1)\n",
      "Requirement already satisfied: anyio<5,>=3 in /opt/conda/lib/python3.10/site-packages (from langchain-core<0.2,>=0.1.16->langchain) (4.2.0)\n",
      "Collecting packaging<24.0,>=23.2 (from langchain-core<0.2,>=0.1.16->langchain)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl (53 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m53.0/53.0 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: annotated-types>=0.4.0 in /opt/conda/lib/python3.10/site-packages (from pydantic<3,>=1->langchain) (0.6.0)\n",
      "Requirement already satisfied: pydantic-core==2.14.6 in /opt/conda/lib/python3.10/site-packages (from pydantic<3,>=1->langchain) (2.14.6)\n",
      "Requirement already satisfied: typing-extensions>=4.6.1 in /opt/conda/lib/python3.10/site-packages (from pydantic<3,>=1->langchain) (4.9.0)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/conda/lib/python3.10/site-packages (from requests<3,>=2->langchain) (2.0.4)\n",
      "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.10/site-packages (from requests<3,>=2->langchain) (3.4)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/conda/lib/python3.10/site-packages (from requests<3,>=2->langchain) (1.26.16)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.10/site-packages (from requests<3,>=2->langchain) (2023.11.17)\n",
      "Requirement already satisfied: greenlet!=0.4.17 in /opt/conda/lib/python3.10/site-packages (from SQLAlchemy<3,>=1.4->langchain) (3.0.3)\n",
      "Requirement already satisfied: sniffio>=1.1 in /opt/conda/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1.16->langchain) (1.3.0)\n",
      "Requirement already satisfied: exceptiongroup>=1.0.2 in /opt/conda/lib/python3.10/site-packages (from anyio<5,>=3->langchain-core<0.2,>=0.1.16->langchain) (1.2.0)\n",
      "Requirement already satisfied: mypy-extensions>=0.3.0 in /opt/conda/lib/python3.10/site-packages (from typing-inspect<1,>=0.4.0->dataclasses-json<0.7,>=0.5.7->langchain) (1.0.0)\n",
      "\u001b[33mDEPRECATION: omegaconf 2.0.6 has a non-standard dependency specifier PyYAML>=5.1.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of omegaconf or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0m\u001b[33mDEPRECATION: pytorch-lightning 1.7.7 has a non-standard dependency specifier torch>=1.9.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of pytorch-lightning or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0mInstalling collected packages: typing-inspect, SQLAlchemy, packaging, jsonpatch, marshmallow, langsmith, dataclasses-json, langchain-core, langchain-community, langchain\n",
      "  Attempting uninstall: packaging\n",
      "    Found existing installation: packaging 23.1\n",
      "    Uninstalling packaging-23.1:\n",
      "      Successfully uninstalled packaging-23.1\n",
      "  Attempting uninstall: jsonpatch\n",
      "    Found existing installation: jsonpatch 1.32\n",
      "    Uninstalling jsonpatch-1.32:\n",
      "      Successfully uninstalled jsonpatch-1.32\n",
      "\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n",
      "conda 23.9.0 requires ruamel-yaml<0.18,>=0.11.14, but you have ruamel-yaml 0.18.5 which is incompatible.\n",
      "tensorflow 2.14.0 requires tensorboard<2.15,>=2.14, but you have tensorboard 2.15.1 which is incompatible.\u001b[0m\u001b[31m\n",
      "\u001b[0mSuccessfully installed SQLAlchemy-2.0.25 dataclasses-json-0.6.3 jsonpatch-1.33 langchain-0.1.4 langchain-community-0.0.16 langchain-core-0.1.17 langsmith-0.0.84 marshmallow-3.20.2 packaging-23.2 typing-inspect-0.9.0\n",
      "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0mLooking in indexes: https://mirrors.aliyun.com/pypi/simple\n",
      "Collecting openai\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/46/85/8681046cd9cc13a36ac76e4a1b047338c90dbeab2e9b14fb36de7f314c93/openai-1.10.0-py3-none-any.whl (225 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m225.1/225.1 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: anyio<5,>=3.5.0 in /opt/conda/lib/python3.10/site-packages (from openai) (4.2.0)\n",
      "Collecting distro<2,>=1.7.0 (from openai)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl (20 kB)\n",
      "Collecting httpx<1,>=0.23.0 (from openai)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/39/9b/4937d841aee9c2c8102d9a4eeb800c7dad25386caabb4a1bf5010df81a57/httpx-0.26.0-py3-none-any.whl (75 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.9/75.9 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: pydantic<3,>=1.9.0 in /opt/conda/lib/python3.10/site-packages (from openai) (2.5.3)\n",
      "Requirement already satisfied: sniffio in /opt/conda/lib/python3.10/site-packages (from openai) (1.3.0)\n",
      "Requirement already satisfied: tqdm>4 in /opt/conda/lib/python3.10/site-packages (from openai) (4.65.0)\n",
      "Requirement already satisfied: typing-extensions<5,>=4.7 in /opt/conda/lib/python3.10/site-packages (from openai) (4.9.0)\n",
      "Requirement already satisfied: idna>=2.8 in /opt/conda/lib/python3.10/site-packages (from anyio<5,>=3.5.0->openai) (3.4)\n",
      "Requirement already satisfied: exceptiongroup>=1.0.2 in /opt/conda/lib/python3.10/site-packages (from anyio<5,>=3.5.0->openai) (1.2.0)\n",
      "Requirement already satisfied: certifi in /opt/conda/lib/python3.10/site-packages (from httpx<1,>=0.23.0->openai) (2023.11.17)\n",
      "Collecting httpcore==1.* (from httpx<1,>=0.23.0->openai)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/56/ba/78b0a99c4da0ff8b0f59defa2f13ca4668189b134bd9840b6202a93d9a0f/httpcore-1.0.2-py3-none-any.whl (76 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m76.9/76.9 kB\u001b[0m \u001b[31m1.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: h11<0.15,>=0.13 in /opt/conda/lib/python3.10/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai) (0.14.0)\n",
      "Requirement already satisfied: annotated-types>=0.4.0 in /opt/conda/lib/python3.10/site-packages (from pydantic<3,>=1.9.0->openai) (0.6.0)\n",
      "Requirement already satisfied: pydantic-core==2.14.6 in /opt/conda/lib/python3.10/site-packages (from pydantic<3,>=1.9.0->openai) (2.14.6)\n",
      "\u001b[33mDEPRECATION: omegaconf 2.0.6 has a non-standard dependency specifier PyYAML>=5.1.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of omegaconf or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0m\u001b[33mDEPRECATION: pytorch-lightning 1.7.7 has a non-standard dependency specifier torch>=1.9.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of pytorch-lightning or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0mInstalling collected packages: httpcore, distro, httpx, openai\n",
      "Successfully installed distro-1.9.0 httpcore-1.0.2 httpx-0.26.0 openai-1.10.0\n",
      "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0mLooking in indexes: https://mirrors.aliyun.com/pypi/simple\n",
      "Collecting wikipedia\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/67/35/25e68fbc99e672127cc6fbb14b8ec1ba3dfef035bf1e4c90f78f24a80b7d/wikipedia-1.4.0.tar.gz (27 kB)\n",
      "  Preparing metadata (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25hCollecting beautifulsoup4 (from wikipedia)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl (147 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m147.9/147.9 kB\u001b[0m \u001b[31m1.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0ma \u001b[36m0:00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: requests<3.0.0,>=2.0.0 in /opt/conda/lib/python3.10/site-packages (from wikipedia) (2.31.0)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in /opt/conda/lib/python3.10/site-packages (from requests<3.0.0,>=2.0.0->wikipedia) (2.0.4)\n",
      "Requirement already satisfied: idna<4,>=2.5 in /opt/conda/lib/python3.10/site-packages (from requests<3.0.0,>=2.0.0->wikipedia) (3.4)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/conda/lib/python3.10/site-packages (from requests<3.0.0,>=2.0.0->wikipedia) (1.26.16)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in /opt/conda/lib/python3.10/site-packages (from requests<3.0.0,>=2.0.0->wikipedia) (2023.11.17)\n",
      "Collecting soupsieve>1.2 (from beautifulsoup4->wikipedia)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/4c/f3/038b302fdfbe3be7da016777069f26ceefe11a681055ea1f7817546508e3/soupsieve-2.5-py3-none-any.whl (36 kB)\n",
      "Building wheels for collected packages: wikipedia\n",
      "  Building wheel for wikipedia (setup.py) ... \u001b[?25ldone\n",
      "\u001b[?25h  Created wheel for wikipedia: filename=wikipedia-1.4.0-py3-none-any.whl size=11690 sha256=4670bc03deded2b438bfc0782853b8e19c3a8d651cc9d2f605ec8fe4a67dd09e\n",
      "  Stored in directory: /root/.cache/pip/wheels/6c/87/4d/510e9349cbe5936a7578ef86d42ff1f762959b33eb34d37593\n",
      "Successfully built wikipedia\n",
      "\u001b[33mDEPRECATION: omegaconf 2.0.6 has a non-standard dependency specifier PyYAML>=5.1.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of omegaconf or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0m\u001b[33mDEPRECATION: pytorch-lightning 1.7.7 has a non-standard dependency specifier torch>=1.9.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of pytorch-lightning or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0mInstalling collected packages: soupsieve, beautifulsoup4, wikipedia\n",
      "Successfully installed beautifulsoup4-4.12.3 soupsieve-2.5 wikipedia-1.4.0\n",
      "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0m"
     ]
    }
   ],
   "source": [
    "# Use %pip (not !pip) so packages install into the running kernel's env;\n",
    "# versions pinned to those the notebook was validated against.\n",
    "%pip install langchain==0.1.4\n",
    "%pip install openai==1.10.0\n",
    "%pip install wikipedia==1.4.0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "01c6598e-c339-4616-9e32-acf331c24cc9",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:37:34.311832Z",
     "iopub.status.busy": "2024-01-30T06:37:34.311505Z",
     "iopub.status.idle": "2024-01-30T06:37:35.685468Z",
     "shell.execute_reply": "2024-01-30T06:37:35.685001Z",
     "shell.execute_reply.started": "2024-01-30T06:37:34.311790Z"
    }
   },
   "outputs": [],
   "source": [
    "import json\n",
    "\n",
    "# Deduplicated: OpenAI, initialize_agent and AgentType were each imported twice\n",
    "# (OpenAI via two different module paths); the duplicates silently shadowed\n",
    "# one another. One import per name, grouped by module.\n",
    "from langchain.agents import AgentType, Tool, initialize_agent, load_tools\n",
    "from langchain.chat_models import ChatOpenAI\n",
    "from langchain.llms import OpenAI\n",
    "from langchain.utilities import (\n",
    "    GoogleSearchAPIWrapper,\n",
    "    GoogleSerperAPIWrapper,\n",
    "    SerpAPIWrapper,\n",
    "    WikipediaAPIWrapper,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "fcb69168-ffa3-4090-8198-7a0cc35987bf",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:37:49.277598Z",
     "iopub.status.busy": "2024-01-30T06:37:49.277213Z",
     "iopub.status.idle": "2024-01-30T06:37:49.280550Z",
     "shell.execute_reply": "2024-01-30T06:37:49.280156Z",
     "shell.execute_reply.started": "2024-01-30T06:37:49.277578Z"
    }
   },
   "outputs": [],
   "source": [
    "def top5_results(query):\n",
    "    \"\"\"Return the top-5 Google Custom Search results for `query`.\n",
    "\n",
    "    Requires GOOGLE_API_KEY and GOOGLE_CSE_ID in the environment.\n",
    "    \"\"\"\n",
    "    search = GoogleSearchAPIWrapper()\n",
    "    return search.results(query, 5)\n",
    "\n",
    "\n",
    "def linkSearch(query):\n",
    "    \"\"\"Search for `query` and return the results serialized as a JSON string.\n",
    "\n",
    "    ensure_ascii=False keeps non-ASCII (e.g. Chinese) text readable.\n",
    "    \"\"\"\n",
    "    res = top5_results(query)\n",
    "    return json.dumps(res, ensure_ascii=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "95009d67-a7f6-42ae-bba9-e66616713f23",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:37:55.602337Z",
     "iopub.status.busy": "2024-01-30T06:37:55.602037Z",
     "iopub.status.idle": "2024-01-30T06:37:55.814367Z",
     "shell.execute_reply": "2024-01-30T06:37:55.813819Z",
     "shell.execute_reply.started": "2024-01-30T06:37:55.602319Z"
    }
   },
   "outputs": [
    {
     "ename": "ValidationError",
     "evalue": "1 validation error for GoogleSearchAPIWrapper\n__root__\n  Did not find google_api_key, please add an environment variable `GOOGLE_API_KEY` which contains it, or pass `google_api_key` as a named parameter. (type=value_error)",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mValidationError\u001b[0m                           Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[4], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mlinkSearch\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43m宁波霍科电器有限公司\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "Cell \u001b[0;32mIn[3], line 5\u001b[0m, in \u001b[0;36mlinkSearch\u001b[0;34m(query)\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mlinkSearch\u001b[39m(query):\n\u001b[0;32m----> 5\u001b[0m   res \u001b[38;5;241m=\u001b[39m \u001b[43mtop5_results\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      6\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m json\u001b[38;5;241m.\u001b[39mdumps(res, ensure_ascii\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m)\n",
      "Cell \u001b[0;32mIn[3], line 2\u001b[0m, in \u001b[0;36mtop5_results\u001b[0;34m(query)\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mtop5_results\u001b[39m(query):\n\u001b[0;32m----> 2\u001b[0m     search \u001b[38;5;241m=\u001b[39m \u001b[43mGoogleSearchAPIWrapper\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      3\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m search\u001b[38;5;241m.\u001b[39mresults(query, \u001b[38;5;241m5\u001b[39m)\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/pydantic/v1/main.py:341\u001b[0m, in \u001b[0;36mBaseModel.__init__\u001b[0;34m(__pydantic_self__, **data)\u001b[0m\n\u001b[1;32m    339\u001b[0m values, fields_set, validation_error \u001b[38;5;241m=\u001b[39m validate_model(__pydantic_self__\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__class__\u001b[39m, data)\n\u001b[1;32m    340\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m validation_error:\n\u001b[0;32m--> 341\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m validation_error\n\u001b[1;32m    342\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    343\u001b[0m     object_setattr(__pydantic_self__, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124m__dict__\u001b[39m\u001b[38;5;124m'\u001b[39m, values)\n",
      "\u001b[0;31mValidationError\u001b[0m: 1 validation error for GoogleSearchAPIWrapper\n__root__\n  Did not find google_api_key, please add an environment variable `GOOGLE_API_KEY` which contains it, or pass `google_api_key` as a named parameter. (type=value_error)"
     ]
    }
   ],
   "source": [
    "linkSearch('宁波霍科电器有限公司')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "fcdd3339-c797-48ec-934e-c007e37e30cb",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:38:18.682397Z",
     "iopub.status.busy": "2024-01-30T06:38:18.682087Z",
     "iopub.status.idle": "2024-01-30T06:38:18.685544Z",
     "shell.execute_reply": "2024-01-30T06:38:18.685083Z",
     "shell.execute_reply.started": "2024-01-30T06:38:18.682378Z"
    }
   },
   "outputs": [],
   "source": [
    "import os\n",
    "from getpass import getpass\n",
    "\n",
    "# SECURITY: never hardcode API keys in a notebook. The keys previously\n",
    "# committed here are compromised and must be rotated/revoked. Note the old\n",
    "# SERPAPI_API_KEY value also contained an invisible zero-width space (U+200B)\n",
    "# which would have silently broken authentication.\n",
    "# Read each key from the environment, prompting only if it is missing.\n",
    "for var in (\"OPENAI_API_KEY\", \"GOOGLE_CSE_ID\", \"GOOGLE_API_KEY\",\n",
    "            \"SERPER_API_KEY\", \"SERPAPI_API_KEY\"):\n",
    "    if not os.environ.get(var):\n",
    "        os.environ[var] = getpass(f\"{var}: \")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "af22d0e2-a9bc-41d9-830d-b707fc999e93",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:38:24.957199Z",
     "iopub.status.busy": "2024-01-30T06:38:24.956895Z",
     "iopub.status.idle": "2024-01-30T06:38:25.025089Z",
     "shell.execute_reply": "2024-01-30T06:38:25.024489Z",
     "shell.execute_reply.started": "2024-01-30T06:38:24.957181Z"
    }
   },
   "outputs": [
    {
     "ename": "ImportError",
     "evalue": "google-api-python-client is not installed. Please install it with `pip install google-api-python-client>=2.100.0`",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mModuleNotFoundError\u001b[0m                       Traceback (most recent call last)",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/google_search.py:77\u001b[0m, in \u001b[0;36mGoogleSearchAPIWrapper.validate_environment\u001b[0;34m(cls, values)\u001b[0m\n\u001b[1;32m     76\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m---> 77\u001b[0m     \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mgoogleapiclient\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdiscovery\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m build\n\u001b[1;32m     79\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m:\n",
      "\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'googleapiclient'",
      "\nDuring handling of the above exception, another exception occurred:\n",
      "\u001b[0;31mImportError\u001b[0m                               Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[6], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mlinkSearch\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43m宁波霍科电器有限公司\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "Cell \u001b[0;32mIn[3], line 5\u001b[0m, in \u001b[0;36mlinkSearch\u001b[0;34m(query)\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mlinkSearch\u001b[39m(query):\n\u001b[0;32m----> 5\u001b[0m   res \u001b[38;5;241m=\u001b[39m \u001b[43mtop5_results\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      6\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m json\u001b[38;5;241m.\u001b[39mdumps(res, ensure_ascii\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m)\n",
      "Cell \u001b[0;32mIn[3], line 2\u001b[0m, in \u001b[0;36mtop5_results\u001b[0;34m(query)\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mtop5_results\u001b[39m(query):\n\u001b[0;32m----> 2\u001b[0m     search \u001b[38;5;241m=\u001b[39m \u001b[43mGoogleSearchAPIWrapper\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      3\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m search\u001b[38;5;241m.\u001b[39mresults(query, \u001b[38;5;241m5\u001b[39m)\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/pydantic/v1/main.py:339\u001b[0m, in \u001b[0;36mBaseModel.__init__\u001b[0;34m(__pydantic_self__, **data)\u001b[0m\n\u001b[1;32m    333\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m    334\u001b[0m \u001b[38;5;124;03mCreate a new model by parsing and validating input data from keyword arguments.\u001b[39;00m\n\u001b[1;32m    335\u001b[0m \n\u001b[1;32m    336\u001b[0m \u001b[38;5;124;03mRaises ValidationError if the input data cannot be parsed to form a valid model.\u001b[39;00m\n\u001b[1;32m    337\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m    338\u001b[0m \u001b[38;5;66;03m# Uses something other than `self` the first arg to allow \"self\" as a settable attribute\u001b[39;00m\n\u001b[0;32m--> 339\u001b[0m values, fields_set, validation_error \u001b[38;5;241m=\u001b[39m \u001b[43mvalidate_model\u001b[49m\u001b[43m(\u001b[49m\u001b[43m__pydantic_self__\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;18;43m__class__\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    340\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m validation_error:\n\u001b[1;32m    341\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m validation_error\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/pydantic/v1/main.py:1102\u001b[0m, in \u001b[0;36mvalidate_model\u001b[0;34m(model, input_data, cls)\u001b[0m\n\u001b[1;32m   1100\u001b[0m     \u001b[38;5;28;01mcontinue\u001b[39;00m\n\u001b[1;32m   1101\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m-> 1102\u001b[0m     values \u001b[38;5;241m=\u001b[39m \u001b[43mvalidator\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcls_\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvalues\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1103\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mValueError\u001b[39;00m, \u001b[38;5;167;01mTypeError\u001b[39;00m, \u001b[38;5;167;01mAssertionError\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m exc:\n\u001b[1;32m   1104\u001b[0m     errors\u001b[38;5;241m.\u001b[39mappend(ErrorWrapper(exc, loc\u001b[38;5;241m=\u001b[39mROOT_KEY))\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/google_search.py:80\u001b[0m, in \u001b[0;36mGoogleSearchAPIWrapper.validate_environment\u001b[0;34m(cls, values)\u001b[0m\n\u001b[1;32m     77\u001b[0m     \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mgoogleapiclient\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdiscovery\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m build\n\u001b[1;32m     79\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m:\n\u001b[0;32m---> 80\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mImportError\u001b[39;00m(\n\u001b[1;32m     81\u001b[0m         \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mgoogle-api-python-client is not installed. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m     82\u001b[0m         \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPlease install it with `pip install google-api-python-client\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m     83\u001b[0m         \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m>=2.100.0`\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m     84\u001b[0m     )\n\u001b[1;32m     86\u001b[0m service \u001b[38;5;241m=\u001b[39m build(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcustomsearch\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mv1\u001b[39m\u001b[38;5;124m\"\u001b[39m, developerKey\u001b[38;5;241m=\u001b[39mgoogle_api_key)\n\u001b[1;32m     87\u001b[0m values[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msearch_engine\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m service\n",
      "\u001b[0;31mImportError\u001b[0m: google-api-python-client is not installed. Please install it with `pip install google-api-python-client>=2.100.0`"
     ]
    }
   ],
   "source": [
    "linkSearch('宁波霍科电器有限公司')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "45aa09db-5c1e-4e81-aa21-bb847bf96b75",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:38:40.317282Z",
     "iopub.status.busy": "2024-01-30T06:38:40.316962Z",
     "iopub.status.idle": "2024-01-30T06:38:53.128681Z",
     "shell.execute_reply": "2024-01-30T06:38:53.128104Z",
     "shell.execute_reply.started": "2024-01-30T06:38:40.317264Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\u001b[33mDEPRECATION: omegaconf 2.0.6 has a non-standard dependency specifier PyYAML>=5.1.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of omegaconf or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0m\u001b[33mDEPRECATION: pytorch-lightning 1.7.7 has a non-standard dependency specifier torch>=1.9.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of pytorch-lightning or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0m\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0m"
     ]
    }
   ],
   "source": [
    "!pip install \"google-api-python-client>=2.100.0\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "id": "b7aeeb98-0f59-4321-a1c1-115edd359eb8",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T06:45:08.193218Z",
     "iopub.status.busy": "2024-01-30T06:45:08.192900Z",
     "iopub.status.idle": "2024-01-30T06:45:40.246842Z",
     "shell.execute_reply": "2024-01-30T06:45:40.246056Z",
     "shell.execute_reply.started": "2024-01-30T06:45:08.193200Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "ename": "TimeoutError",
     "evalue": "[Errno 110] Connection timed out",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mTimeoutError\u001b[0m                              Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[10], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mlinkSearch\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43m宁波霍科电器有限公司\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "Cell \u001b[0;32mIn[3], line 5\u001b[0m, in \u001b[0;36mlinkSearch\u001b[0;34m(query)\u001b[0m\n\u001b[1;32m      4\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mlinkSearch\u001b[39m(query):\n\u001b[0;32m----> 5\u001b[0m   res \u001b[38;5;241m=\u001b[39m \u001b[43mtop5_results\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      6\u001b[0m   \u001b[38;5;28;01mreturn\u001b[39;00m json\u001b[38;5;241m.\u001b[39mdumps(res, ensure_ascii\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mFalse\u001b[39;00m)\n",
      "Cell \u001b[0;32mIn[3], line 3\u001b[0m, in \u001b[0;36mtop5_results\u001b[0;34m(query)\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mtop5_results\u001b[39m(query):\n\u001b[1;32m      2\u001b[0m     search \u001b[38;5;241m=\u001b[39m GoogleSearchAPIWrapper()\n\u001b[0;32m----> 3\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43msearch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mresults\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m5\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/google_search.py:123\u001b[0m, in \u001b[0;36mGoogleSearchAPIWrapper.results\u001b[0;34m(self, query, num_results, search_params)\u001b[0m\n\u001b[1;32m    109\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Run query through GoogleSearch and return metadata.\u001b[39;00m\n\u001b[1;32m    110\u001b[0m \n\u001b[1;32m    111\u001b[0m \u001b[38;5;124;03mArgs:\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    120\u001b[0m \u001b[38;5;124;03m        link - The link to the result.\u001b[39;00m\n\u001b[1;32m    121\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m    122\u001b[0m metadata_results \u001b[38;5;241m=\u001b[39m []\n\u001b[0;32m--> 123\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_google_search_results\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    124\u001b[0m \u001b[43m    \u001b[49m\u001b[43mquery\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mnum_results\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43msearch_params\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mor\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[43m}\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    125\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    126\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(results) \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m:\n\u001b[1;32m    127\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m [{\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mResult\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNo good Google Search Result was found\u001b[39m\u001b[38;5;124m\"\u001b[39m}]\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/google_search.py:62\u001b[0m, in \u001b[0;36mGoogleSearchAPIWrapper._google_search_results\u001b[0;34m(self, search_term, **kwargs)\u001b[0m\n\u001b[1;32m     60\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msiterestrict:\n\u001b[1;32m     61\u001b[0m     cse \u001b[38;5;241m=\u001b[39m cse\u001b[38;5;241m.\u001b[39msiterestrict()\n\u001b[0;32m---> 62\u001b[0m res \u001b[38;5;241m=\u001b[39m \u001b[43mcse\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mlist\u001b[49m\u001b[43m(\u001b[49m\u001b[43mq\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43msearch_term\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcx\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mgoogle_cse_id\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mexecute\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     63\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m res\u001b[38;5;241m.\u001b[39mget(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mitems\u001b[39m\u001b[38;5;124m\"\u001b[39m, [])\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/googleapiclient/_helpers.py:130\u001b[0m, in \u001b[0;36mpositional.<locals>.positional_decorator.<locals>.positional_wrapper\u001b[0;34m(*args, **kwargs)\u001b[0m\n\u001b[1;32m    128\u001b[0m     \u001b[38;5;28;01melif\u001b[39;00m positional_parameters_enforcement \u001b[38;5;241m==\u001b[39m POSITIONAL_WARNING:\n\u001b[1;32m    129\u001b[0m         logger\u001b[38;5;241m.\u001b[39mwarning(message)\n\u001b[0;32m--> 130\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mwrapped\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/googleapiclient/http.py:923\u001b[0m, in \u001b[0;36mHttpRequest.execute\u001b[0;34m(self, http, num_retries)\u001b[0m\n\u001b[1;32m    920\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mheaders[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcontent-length\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mstr\u001b[39m(\u001b[38;5;28mlen\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbody))\n\u001b[1;32m    922\u001b[0m \u001b[38;5;66;03m# Handle retries for server-side errors.\u001b[39;00m\n\u001b[0;32m--> 923\u001b[0m resp, content \u001b[38;5;241m=\u001b[39m \u001b[43m_retry_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    924\u001b[0m \u001b[43m    \u001b[49m\u001b[43mhttp\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    925\u001b[0m \u001b[43m    \u001b[49m\u001b[43mnum_retries\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    926\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mrequest\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m    927\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_sleep\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    928\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_rand\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    929\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mstr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43muri\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    930\u001b[0m \u001b[43m    
\u001b[49m\u001b[43mmethod\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mstr\u001b[39;49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmethod\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    931\u001b[0m \u001b[43m    \u001b[49m\u001b[43mbody\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mbody\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    932\u001b[0m \u001b[43m    \u001b[49m\u001b[43mheaders\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mheaders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    933\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    935\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m callback \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mresponse_callbacks:\n\u001b[1;32m    936\u001b[0m     callback(resp)\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/googleapiclient/http.py:222\u001b[0m, in \u001b[0;36m_retry_request\u001b[0;34m(http, num_retries, req_type, sleep, rand, uri, method, *args, **kwargs)\u001b[0m\n\u001b[1;32m    220\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m exception:\n\u001b[1;32m    221\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m retry_num \u001b[38;5;241m==\u001b[39m num_retries:\n\u001b[0;32m--> 222\u001b[0m         \u001b[38;5;28;01mraise\u001b[39;00m exception\n\u001b[1;32m    223\u001b[0m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    224\u001b[0m         \u001b[38;5;28;01mcontinue\u001b[39;00m\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/googleapiclient/http.py:191\u001b[0m, in \u001b[0;36m_retry_request\u001b[0;34m(http, num_retries, req_type, sleep, rand, uri, method, *args, **kwargs)\u001b[0m\n\u001b[1;32m    189\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    190\u001b[0m     exception \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m--> 191\u001b[0m     resp, content \u001b[38;5;241m=\u001b[39m \u001b[43mhttp\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrequest\u001b[49m\u001b[43m(\u001b[49m\u001b[43muri\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    192\u001b[0m \u001b[38;5;66;03m# Retry on SSL errors and socket timeout errors.\u001b[39;00m\n\u001b[1;32m    193\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m _ssl_SSLError \u001b[38;5;28;01mas\u001b[39;00m ssl_error:\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/httplib2/__init__.py:1724\u001b[0m, in \u001b[0;36mHttp.request\u001b[0;34m(self, uri, method, body, headers, redirections, connection_type)\u001b[0m\n\u001b[1;32m   1722\u001b[0m             content \u001b[38;5;241m=\u001b[39m \u001b[38;5;124mb\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m   1723\u001b[0m         \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m-> 1724\u001b[0m             (response, content) \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m   1725\u001b[0m \u001b[43m                \u001b[49m\u001b[43mconn\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mauthority\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43muri\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrequest_uri\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbody\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mredirections\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcachekey\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m   1726\u001b[0m \u001b[43m            \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1727\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m   1728\u001b[0m     is_timeout \u001b[38;5;241m=\u001b[39m \u001b[38;5;28misinstance\u001b[39m(e, socket\u001b[38;5;241m.\u001b[39mtimeout)\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/httplib2/__init__.py:1444\u001b[0m, in \u001b[0;36mHttp._request\u001b[0;34m(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey)\u001b[0m\n\u001b[1;32m   1441\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m auth:\n\u001b[1;32m   1442\u001b[0m     auth\u001b[38;5;241m.\u001b[39mrequest(method, request_uri, headers, body)\n\u001b[0;32m-> 1444\u001b[0m (response, content) \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_conn_request\u001b[49m\u001b[43m(\u001b[49m\u001b[43mconn\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrequest_uri\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmethod\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mbody\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1446\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m auth:\n\u001b[1;32m   1447\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m auth\u001b[38;5;241m.\u001b[39mresponse(response, body):\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/httplib2/__init__.py:1366\u001b[0m, in \u001b[0;36mHttp._conn_request\u001b[0;34m(self, conn, request_uri, method, body, headers)\u001b[0m\n\u001b[1;32m   1364\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m   1365\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m conn\u001b[38;5;241m.\u001b[39msock \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m-> 1366\u001b[0m         \u001b[43mconn\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconnect\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1367\u001b[0m     conn\u001b[38;5;241m.\u001b[39mrequest(method, request_uri, body, headers)\n\u001b[1;32m   1368\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m socket\u001b[38;5;241m.\u001b[39mtimeout:\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/httplib2/__init__.py:1156\u001b[0m, in \u001b[0;36mHTTPSConnectionWithTimeout.connect\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1154\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m has_timeout(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtimeout):\n\u001b[1;32m   1155\u001b[0m     sock\u001b[38;5;241m.\u001b[39msettimeout(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtimeout)\n\u001b[0;32m-> 1156\u001b[0m \u001b[43msock\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mconnect\u001b[49m\u001b[43m(\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mhost\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mport\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1158\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msock \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_context\u001b[38;5;241m.\u001b[39mwrap_socket(sock, server_hostname\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhost)\n\u001b[1;32m   1160\u001b[0m \u001b[38;5;66;03m# Python 3.3 compatibility: emulate the check_hostname behavior\u001b[39;00m\n",
      "\u001b[0;31mTimeoutError\u001b[0m: [Errno 110] Connection timed out"
     ]
    }
   ],
   "source": [
    "linkSearch('宁波霍科电器有限公司')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "d6897d34-8470-426f-b6ca-8034206b9c82",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T06:53:07.226296Z",
     "iopub.status.busy": "2024-01-30T06:53:07.226051Z",
     "iopub.status.idle": "2024-01-30T06:53:38.969280Z",
     "shell.execute_reply": "2024-01-30T06:53:38.968690Z",
     "shell.execute_reply.started": "2024-01-30T06:53:07.226279Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "--2024-01-30 14:53:07--  https://www.google.com/\n",
      "正在解析主机 www.google.com (www.google.com)... 108.160.165.173, 2a03:2880:f127:83:face:b00c:0:25de\n",
      "正在连接 www.google.com (www.google.com)|108.160.165.173|:443... 失败：连接超时。\n",
      "正在连接 www.google.com (www.google.com)|2a03:2880:f127:83:face:b00c:0:25de|:443... 失败：网络不可达。\n"
     ]
    }
   ],
   "source": [
    "!wget  \"https://www.google.com\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "id": "d6ec4c16-e935-4da9-abf6-779729d3da04",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T06:51:48.355486Z",
     "iopub.status.busy": "2024-01-30T06:51:48.355180Z",
     "iopub.status.idle": "2024-01-30T06:51:49.715579Z",
     "shell.execute_reply": "2024-01-30T06:51:49.715030Z",
     "shell.execute_reply.started": "2024-01-30T06:51:48.355467Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<!DOCTYPE html>\n",
      "<!--STATUS OK--><html> <head><meta http-equiv=content-type content=text/html;charset=utf-8><meta http-equiv=X-UA-Compatible content=IE=Edge><meta content=always name=referrer><link rel=stylesheet type=text/css href=https://ss1.bdstatic.com/5eN1bjq8AAUYm2zgoY3K/r/www/cache/bdorz/baidu.min.css><title>百度一下，你就知道</title></head> <body link=#0000cc> <div id=wrapper> <div id=head> <div class=head_wrapper> <div class=s_form> <div class=s_form_wrapper> <div id=lg> <img hidefocus=true src=//www.baidu.com/img/bd_logo1.png width=270 height=129> </div> <form id=form name=f action=//www.baidu.com/s class=fm> <input type=hidden name=bdorz_come value=1> <input type=hidden name=ie value=utf-8> <input type=hidden name=f value=8> <input type=hidden name=rsv_bp value=1> <input type=hidden name=rsv_idx value=1> <input type=hidden name=tn value=baidu><span class=\"bg s_ipt_wr\"><input id=kw name=wd class=s_ipt value maxlength=255 autocomplete=off autofocus=autofocus></span><span class=\"bg s_btn_wr\"><input type=submit id=su value=百度一下 class=\"bg s_btn\" autofocus></span> </form> </div> </div> <div id=u1> <a href=http://news.baidu.com name=tj_trnews class=mnav>新闻</a> <a href=https://www.hao123.com name=tj_trhao123 class=mnav>hao123</a> <a href=http://map.baidu.com name=tj_trmap class=mnav>地图</a> <a href=http://v.baidu.com name=tj_trvideo class=mnav>视频</a> <a href=http://tieba.baidu.com name=tj_trtieba class=mnav>贴吧</a> <noscript> <a href=http://www.baidu.com/bdorz/login.gif?login&amp;tpl=mn&amp;u=http%3A%2F%2Fwww.baidu.com%2f%3fbdorz_come%3d1 name=tj_login class=lb>登录</a> </noscript> <script>document.write('<a href=\"http://www.baidu.com/bdorz/login.gif?login&tpl=mn&u='+ encodeURIComponent(window.location.href+ (window.location.search === \"\" ? \"?\" : \"&\")+ \"bdorz_come=1\")+ '\" name=\"tj_login\" class=\"lb\">登录</a>');\n",
      "                </script> <a href=//www.baidu.com/more/ name=tj_briicon class=bri style=\"display: block;\">更多产品</a> </div> </div> </div> <div id=ftCon> <div id=ftConw> <p id=lh> <a href=http://home.baidu.com>关于百度</a> <a href=http://ir.baidu.com>About Baidu</a> </p> <p id=cp>&copy;2017&nbsp;Baidu&nbsp;<a href=http://www.baidu.com/duty/>使用百度前必读</a>&nbsp; <a href=http://jianyi.baidu.com/ class=cp-feedback>意见反馈</a>&nbsp;京ICP证030173号&nbsp; <img src=//www.baidu.com/img/gs.gif> </p> </div> </div> </div> </body> </html>\n"
     ]
    }
   ],
   "source": [
    "!curl \"https://www.baidu.com\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "id": "0e670e20-1f75-476e-89f5-4ef5c7d36f13",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:52:35.316284Z",
     "iopub.status.busy": "2024-01-30T06:52:35.315947Z",
     "iopub.status.idle": "2024-01-30T06:53:07.225045Z",
     "shell.execute_reply": "2024-01-30T06:53:07.224506Z",
     "shell.execute_reply.started": "2024-01-30T06:52:35.316261Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "curl: (28) Failed to connect to www.google.com port 80 after 31739 ms: 连接超时\n"
     ]
    }
   ],
   "source": [
    "!curl  \"http://www.google.com\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "045f14c7-08b6-4f9e-8af8-177a7e904c82",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:55:26.570147Z",
     "iopub.status.busy": "2024-01-30T06:55:26.569769Z",
     "iopub.status.idle": "2024-01-30T06:55:58.933483Z",
     "shell.execute_reply": "2024-01-30T06:55:58.932741Z",
     "shell.execute_reply.started": "2024-01-30T06:55:26.570124Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/duckduckgo_search.py:47: UserWarning: DDGS running in an async loop. This may cause errors. Use AsyncDDGS instead.\n",
      "  with DDGS() as ddgs:\n"
     ]
    },
    {
     "ename": "DuckDuckGoSearchException",
     "evalue": "_aget_url() https://duckduckgo.com RequestsError: Failed to perform, ErrCode: 28, Reason: 'Failed to connect to duckduckgo.com port 443 after 31722 ms: Couldn't connect to server'. This may be a libcurl error, See https://curl.se/libcurl/c/libcurl-errors.html first for more details.",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mRequestsError\u001b[0m                             Traceback (most recent call last)",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/duckduckgo_search/duckduckgo_search_async.py:52\u001b[0m, in \u001b[0;36mAsyncDDGS._aget_url\u001b[0;34m(self, method, url, **kwargs)\u001b[0m\n\u001b[1;32m     51\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m---> 52\u001b[0m     resp \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mawait\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_asession\u001b[38;5;241m.\u001b[39mrequest(method, url, stream\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m     53\u001b[0m     resp\u001b[38;5;241m.\u001b[39mraise_for_status()\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/curl_cffi/requests/session.py:959\u001b[0m, in \u001b[0;36mAsyncSession.request\u001b[0;34m(self, method, url, params, data, json, headers, cookies, files, auth, timeout, allow_redirects, max_redirects, proxies, verify, referer, accept_encoding, content_callback, impersonate, default_headers, http_version, interface, stream, max_recv_speed)\u001b[0m\n\u001b[1;32m    958\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mrelease_curl(curl)\n\u001b[0;32m--> 959\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m first_element\n\u001b[1;32m    961\u001b[0m rsp\u001b[38;5;241m.\u001b[39mrequest \u001b[38;5;241m=\u001b[39m req\n",
      "\u001b[0;31mRequestsError\u001b[0m: Failed to perform, ErrCode: 28, Reason: 'Failed to connect to duckduckgo.com port 443 after 31722 ms: Couldn't connect to server'. This may be a libcurl error, See https://curl.se/libcurl/c/libcurl-errors.html first for more details.",
      "\nThe above exception was the direct cause of the following exception:\n",
      "\u001b[0;31mDuckDuckGoSearchException\u001b[0m                 Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[22], line 3\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mlangchain\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mtools\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m DuckDuckGoSearchRun\n\u001b[1;32m      2\u001b[0m search \u001b[38;5;241m=\u001b[39m DuckDuckGoSearchRun()\n\u001b[0;32m----> 3\u001b[0m \u001b[43msearch\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mTesla stock price?\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_core/tools.py:373\u001b[0m, in \u001b[0;36mBaseTool.run\u001b[0;34m(self, tool_input, verbose, start_color, color, callbacks, tags, metadata, run_name, **kwargs)\u001b[0m\n\u001b[1;32m    371\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mException\u001b[39;00m, \u001b[38;5;167;01mKeyboardInterrupt\u001b[39;00m) \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m    372\u001b[0m     run_manager\u001b[38;5;241m.\u001b[39mon_tool_error(e)\n\u001b[0;32m--> 373\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[1;32m    374\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    375\u001b[0m     run_manager\u001b[38;5;241m.\u001b[39mon_tool_end(\n\u001b[1;32m    376\u001b[0m         \u001b[38;5;28mstr\u001b[39m(observation), color\u001b[38;5;241m=\u001b[39mcolor, name\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mname, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs\n\u001b[1;32m    377\u001b[0m     )\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_core/tools.py:345\u001b[0m, in \u001b[0;36mBaseTool.run\u001b[0;34m(self, tool_input, verbose, start_color, color, callbacks, tags, metadata, run_name, **kwargs)\u001b[0m\n\u001b[1;32m    342\u001b[0m     parsed_input \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_parse_input(tool_input)\n\u001b[1;32m    343\u001b[0m     tool_args, tool_kwargs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_to_args_and_kwargs(parsed_input)\n\u001b[1;32m    344\u001b[0m     observation \u001b[38;5;241m=\u001b[39m (\n\u001b[0;32m--> 345\u001b[0m         \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mtool_args\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrun_manager\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mrun_manager\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mtool_kwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    346\u001b[0m         \u001b[38;5;28;01mif\u001b[39;00m new_arg_supported\n\u001b[1;32m    347\u001b[0m         \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_run(\u001b[38;5;241m*\u001b[39mtool_args, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mtool_kwargs)\n\u001b[1;32m    348\u001b[0m     )\n\u001b[1;32m    349\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m ToolException \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[1;32m    350\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mhandle_tool_error:\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/tools/ddg_search/tool.py:39\u001b[0m, in \u001b[0;36mDuckDuckGoSearchRun._run\u001b[0;34m(self, query, run_manager)\u001b[0m\n\u001b[1;32m     33\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_run\u001b[39m(\n\u001b[1;32m     34\u001b[0m     \u001b[38;5;28mself\u001b[39m,\n\u001b[1;32m     35\u001b[0m     query: \u001b[38;5;28mstr\u001b[39m,\n\u001b[1;32m     36\u001b[0m     run_manager: Optional[CallbackManagerForToolRun] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[1;32m     37\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28mstr\u001b[39m:\n\u001b[1;32m     38\u001b[0m \u001b[38;5;250m    \u001b[39m\u001b[38;5;124;03m\"\"\"Use the tool.\"\"\"\u001b[39;00m\n\u001b[0;32m---> 39\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mapi_wrapper\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/duckduckgo_search.py:81\u001b[0m, in \u001b[0;36mDuckDuckGoSearchAPIWrapper.run\u001b[0;34m(self, query)\u001b[0m\n\u001b[1;32m     79\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Run query through DuckDuckGo and return concatenated results.\"\"\"\u001b[39;00m\n\u001b[1;32m     80\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msource \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtext\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[0;32m---> 81\u001b[0m     results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_ddgs_text\u001b[49m\u001b[43m(\u001b[49m\u001b[43mquery\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     82\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39msource \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mnews\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m     83\u001b[0m     results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_ddgs_news(query)\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/duckduckgo_search.py:57\u001b[0m, in \u001b[0;36mDuckDuckGoSearchAPIWrapper._ddgs_text\u001b[0;34m(self, query, max_results)\u001b[0m\n\u001b[1;32m     48\u001b[0m     ddgs_gen \u001b[38;5;241m=\u001b[39m ddgs\u001b[38;5;241m.\u001b[39mtext(\n\u001b[1;32m     49\u001b[0m         query,\n\u001b[1;32m     50\u001b[0m         region\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mregion,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m     54\u001b[0m         backend\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbackend,\n\u001b[1;32m     55\u001b[0m     )\n\u001b[1;32m     56\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m ddgs_gen:\n\u001b[0;32m---> 57\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m [r \u001b[38;5;28;01mfor\u001b[39;00m r \u001b[38;5;129;01min\u001b[39;00m ddgs_gen]\n\u001b[1;32m     58\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m []\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/langchain_community/utilities/duckduckgo_search.py:57\u001b[0m, in \u001b[0;36m<listcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m     48\u001b[0m     ddgs_gen \u001b[38;5;241m=\u001b[39m ddgs\u001b[38;5;241m.\u001b[39mtext(\n\u001b[1;32m     49\u001b[0m         query,\n\u001b[1;32m     50\u001b[0m         region\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mregion,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m     54\u001b[0m         backend\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbackend,\n\u001b[1;32m     55\u001b[0m     )\n\u001b[1;32m     56\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m ddgs_gen:\n\u001b[0;32m---> 57\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m [r \u001b[38;5;28;01mfor\u001b[39;00m r \u001b[38;5;129;01min\u001b[39;00m ddgs_gen]\n\u001b[1;32m     58\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m []\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/duckduckgo_search/duckduckgo_search.py:31\u001b[0m, in \u001b[0;36mDDGS._iter_over_async\u001b[0;34m(self, async_gen)\u001b[0m\n\u001b[1;32m     29\u001b[0m \u001b[38;5;28;01mwhile\u001b[39;00m \u001b[38;5;28;01mTrue\u001b[39;00m:\n\u001b[1;32m     30\u001b[0m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m---> 31\u001b[0m         \u001b[38;5;28;01myield\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_loop\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun_until_complete\u001b[49m\u001b[43m(\u001b[49m\u001b[43masync_gen\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[38;5;21;43m__anext__\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m     32\u001b[0m     \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mStopAsyncIteration\u001b[39;00m:\n\u001b[1;32m     33\u001b[0m         \u001b[38;5;28;01mbreak\u001b[39;00m\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/nest_asyncio.py:98\u001b[0m, in \u001b[0;36m_patch_loop.<locals>.run_until_complete\u001b[0;34m(self, future)\u001b[0m\n\u001b[1;32m     95\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m f\u001b[38;5;241m.\u001b[39mdone():\n\u001b[1;32m     96\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\n\u001b[1;32m     97\u001b[0m         \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mEvent loop stopped before Future completed.\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m---> 98\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mf\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mresult\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/asyncio/futures.py:201\u001b[0m, in \u001b[0;36mFuture.result\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    199\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m__log_traceback \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n\u001b[1;32m    200\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_exception \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m--> 201\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_exception\u001b[38;5;241m.\u001b[39mwith_traceback(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_exception_tb)\n\u001b[1;32m    202\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_result\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/asyncio/tasks.py:232\u001b[0m, in \u001b[0;36mTask.__step\u001b[0;34m(***failed resolving arguments***)\u001b[0m\n\u001b[1;32m    228\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    229\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m exc \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    230\u001b[0m         \u001b[38;5;66;03m# We use the `send` method directly, because coroutines\u001b[39;00m\n\u001b[1;32m    231\u001b[0m         \u001b[38;5;66;03m# don't have `__iter__` and `__next__` methods.\u001b[39;00m\n\u001b[0;32m--> 232\u001b[0m         result \u001b[38;5;241m=\u001b[39m \u001b[43mcoro\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msend\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[1;32m    233\u001b[0m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    234\u001b[0m         result \u001b[38;5;241m=\u001b[39m coro\u001b[38;5;241m.\u001b[39mthrow(exc)\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/duckduckgo_search/duckduckgo_search_async.py:102\u001b[0m, in \u001b[0;36mAsyncDDGS.text\u001b[0;34m(self, keywords, region, safesearch, timelimit, backend, max_results)\u001b[0m\n\u001b[1;32m     99\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m backend \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mlite\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m    100\u001b[0m     results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_text_lite(keywords, region, timelimit, max_results)\n\u001b[0;32m--> 102\u001b[0m \u001b[38;5;28;01masync\u001b[39;00m \u001b[38;5;28;01mfor\u001b[39;00m result \u001b[38;5;129;01min\u001b[39;00m results:\n\u001b[1;32m    103\u001b[0m     \u001b[38;5;28;01myield\u001b[39;00m result\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/duckduckgo_search/duckduckgo_search_async.py:128\u001b[0m, in \u001b[0;36mAsyncDDGS._text_api\u001b[0;34m(self, keywords, region, safesearch, timelimit, max_results)\u001b[0m\n\u001b[1;32m    113\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"DuckDuckGo text search generator. Query params: https://duckduckgo.com/params.\u001b[39;00m\n\u001b[1;32m    114\u001b[0m \n\u001b[1;32m    115\u001b[0m \u001b[38;5;124;03mArgs:\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    124\u001b[0m \n\u001b[1;32m    125\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m    126\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m keywords, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mkeywords is mandatory\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m--> 128\u001b[0m vqd \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mawait\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_aget_vqd(keywords)\n\u001b[1;32m    130\u001b[0m payload \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m    131\u001b[0m     \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mq\u001b[39m\u001b[38;5;124m\"\u001b[39m: keywords,\n\u001b[1;32m    132\u001b[0m     \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mkl\u001b[39m\u001b[38;5;124m\"\u001b[39m: region,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    139\u001b[0m     \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msp\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m0\u001b[39m\u001b[38;5;124m\"\u001b[39m,\n\u001b[1;32m    140\u001b[0m }\n\u001b[1;32m    141\u001b[0m safesearch \u001b[38;5;241m=\u001b[39m safesearch\u001b[38;5;241m.\u001b[39mlower()\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/duckduckgo_search/duckduckgo_search_async.py:65\u001b[0m, in \u001b[0;36mAsyncDDGS._aget_vqd\u001b[0;34m(self, keywords)\u001b[0m\n\u001b[1;32m     63\u001b[0m \u001b[38;5;28;01masync\u001b[39;00m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_aget_vqd\u001b[39m(\u001b[38;5;28mself\u001b[39m, keywords: \u001b[38;5;28mstr\u001b[39m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Optional[\u001b[38;5;28mstr\u001b[39m]:\n\u001b[1;32m     64\u001b[0m \u001b[38;5;250m    \u001b[39m\u001b[38;5;124;03m\"\"\"Get vqd value for a search query.\"\"\"\u001b[39;00m\n\u001b[0;32m---> 65\u001b[0m     resp_content \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mawait\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_aget_url(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPOST\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mhttps://duckduckgo.com\u001b[39m\u001b[38;5;124m\"\u001b[39m, data\u001b[38;5;241m=\u001b[39m{\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mq\u001b[39m\u001b[38;5;124m\"\u001b[39m: keywords})\n\u001b[1;32m     66\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m resp_content:\n\u001b[1;32m     67\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m _extract_vqd(resp_content, keywords)\n",
      "File \u001b[0;32m/opt/conda/lib/python3.10/site-packages/duckduckgo_search/duckduckgo_search_async.py:61\u001b[0m, in \u001b[0;36mAsyncDDGS._aget_url\u001b[0;34m(self, method, url, **kwargs)\u001b[0m\n\u001b[1;32m     59\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m resp_content\n\u001b[1;32m     60\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m ex:\n\u001b[0;32m---> 61\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m DuckDuckGoSearchException(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m_aget_url() \u001b[39m\u001b[38;5;132;01m{\u001b[39;00murl\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m \u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mtype\u001b[39m(ex)\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m: \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mex\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m\"\u001b[39m) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mex\u001b[39;00m\n",
      "\u001b[0;31mDuckDuckGoSearchException\u001b[0m: _aget_url() https://duckduckgo.com RequestsError: Failed to perform, ErrCode: 28, Reason: 'Failed to connect to duckduckgo.com port 443 after 31722 ms: Couldn't connect to server'. This may be a libcurl error, See https://curl.se/libcurl/c/libcurl-errors.html first for more details."
     ]
    }
   ],
   "source": [
    "from langchain.tools import DuckDuckGoSearchRun\n",
    "search = DuckDuckGoSearchRun()\n",
    "search.run(\"Tesla stock price?\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "690bcaac-a695-49dd-bb0a-68c32a0b1e02",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T06:55:12.341953Z",
     "iopub.status.busy": "2024-01-30T06:55:12.341589Z",
     "iopub.status.idle": "2024-01-30T06:55:21.585768Z",
     "shell.execute_reply": "2024-01-30T06:55:21.585155Z",
     "shell.execute_reply.started": "2024-01-30T06:55:12.341929Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: https://mirrors.aliyun.com/pypi/simple\n",
      "Collecting duckduckgo-search\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/e4/e4/b3a8ec8efe4ed5ffd10089c056531573900c3699832285212d4d442d45eb/duckduckgo_search-4.4-py3-none-any.whl (20 kB)\n",
      "Collecting docstring-inheritance>=2.1.2 (from duckduckgo-search)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/d6/c5/fca4270777091c3b7275409fa7ba51768197da64dfe26cc9f5989c19403c/docstring_inheritance-2.1.2-py3-none-any.whl (21 kB)\n",
      "Requirement already satisfied: click>=8.1.7 in /opt/conda/lib/python3.10/site-packages (from duckduckgo-search) (8.1.7)\n",
      "Collecting curl-cffi>=0.6.0b7 (from duckduckgo-search)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/54/b6/e0ed7de404035bf35bd89ce73b97d0ffc7eece855959b8abb19ad483ffb9/curl_cffi-0.6.0b7-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (6.9 MB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m6.9/6.9 MB\u001b[0m \u001b[31m1.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m00:01\u001b[0m00:01\u001b[0m\n",
      "\u001b[?25hRequirement already satisfied: lxml>=4.9.3 in /opt/conda/lib/python3.10/site-packages (from duckduckgo-search) (4.9.4)\n",
      "Collecting nest-asyncio>=1.6.0 (from duckduckgo-search)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl (5.2 kB)\n",
      "Requirement already satisfied: cffi>=1.12.0 in /opt/conda/lib/python3.10/site-packages (from curl-cffi>=0.6.0b7->duckduckgo-search) (1.15.1)\n",
      "Requirement already satisfied: certifi in /opt/conda/lib/python3.10/site-packages (from curl-cffi>=0.6.0b7->duckduckgo-search) (2023.11.17)\n",
      "Requirement already satisfied: pycparser in /opt/conda/lib/python3.10/site-packages (from cffi>=1.12.0->curl-cffi>=0.6.0b7->duckduckgo-search) (2.21)\n",
      "\u001b[33mDEPRECATION: omegaconf 2.0.6 has a non-standard dependency specifier PyYAML>=5.1.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of omegaconf or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0m\u001b[33mDEPRECATION: pytorch-lightning 1.7.7 has a non-standard dependency specifier torch>=1.9.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of pytorch-lightning or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0mInstalling collected packages: nest-asyncio, docstring-inheritance, curl-cffi, duckduckgo-search\n",
      "  Attempting uninstall: nest-asyncio\n",
      "    Found existing installation: nest-asyncio 1.5.8\n",
      "    Uninstalling nest-asyncio-1.5.8:\n",
      "      Successfully uninstalled nest-asyncio-1.5.8\n",
      "Successfully installed curl-cffi-0.6.0b7 docstring-inheritance-2.1.2 duckduckgo-search-4.4 nest-asyncio-1.6.0\n",
      "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n"
     ]
    }
   ],
   "source": [
    "pip install -U duckduckgo-search"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "6f67498e-c552-4be2-b8f7-bb47e498e68a",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "!ping duckduckgo.com"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "47db18ec-7ba9-4b6a-aa4d-49ab487fa491",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T07:15:45.255519Z",
     "iopub.status.busy": "2024-01-30T07:15:45.255268Z",
     "iopub.status.idle": "2024-01-30T07:15:56.369216Z",
     "shell.execute_reply": "2024-01-30T07:15:56.368621Z",
     "shell.execute_reply.started": "2024-01-30T07:15:45.255502Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "PING bing.com (204.79.197.200) 56(84) bytes of data.\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=1 ttl=112 time=60.6 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=2 ttl=112 time=60.5 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=3 ttl=112 time=60.3 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=4 ttl=112 time=60.3 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=5 ttl=112 time=60.4 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=6 ttl=112 time=60.5 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=7 ttl=112 time=60.3 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=8 ttl=112 time=60.3 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=9 ttl=112 time=60.3 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=10 ttl=112 time=60.3 ms\n",
      "64 bytes from a-0001.a-msedge.net (204.79.197.200): icmp_seq=11 ttl=112 time=60.3 ms\n",
      "^C\n",
      "\n",
      "--- bing.com ping statistics ---\n",
      "11 packets transmitted, 11 received, 0% packet loss, time 10010ms\n",
      "rtt min/avg/max/mdev = 60.291/60.378/60.611/0.098 ms\n"
     ]
    }
   ],
   "source": [
    "!ping bing.com"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "8798faa3-8693-464e-88b8-89cd41305365",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T07:39:18.441576Z",
     "iopub.status.busy": "2024-01-30T07:39:18.441243Z",
     "iopub.status.idle": "2024-01-30T07:39:18.863098Z",
     "shell.execute_reply": "2024-01-30T07:39:18.862651Z",
     "shell.execute_reply.started": "2024-01-30T07:39:18.441555Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1111111\n",
      "http://cn.bing.com/search?q=%E4%BA%94%E6%9C%88%E5%A4%A92023%E5%B9%B4%E6%BC%94%E5%94%B1%E4%BC%9A%E9%97%A8%E7%A5%A8%E5%A4%9A%E5%B0%91%E9%92%B1\n",
      "1111112\n",
      "1111113\n",
      "1111114\n",
      "1111115\n",
      "[]\n",
      "1111116\n",
      "\n"
     ]
    }
   ],
   "source": [
    "import re,urllib.parse,urllib.request,urllib.error\n",
    "from bs4 import BeautifulSoup as BS\n",
    "\n",
    "# 搜索框中的搜索内容\n",
    "word = '五月天2023年演唱会门票多少钱'\n",
    "\n",
    "# 获取bing搜索的结果\n",
    "def get_bing_results(word):\n",
    "    baseUrl = 'http://cn.bing.com/search?'\n",
    "    word = word.encode(encoding='utf-8', errors='strict')\n",
    "\n",
    "    data = {'q':word}\n",
    "    data = urllib.parse.urlencode(data)\n",
    "    url = baseUrl+data\n",
    "    print('1111111')\n",
    "    print(url)\n",
    "\n",
    "    print('1111112')\n",
    "    try:\n",
    "        html = urllib.request.urlopen(url)\n",
    "        print('1111113')\n",
    "    except urllib.error.HTTPError as e:\n",
    "        print(e.code)\n",
    "    except urllib.error.URLError as e:\n",
    "        print(e.reason)\n",
    "\n",
    "    print('1111114')\n",
    "    # 解析html\n",
    "    soup = BS(html,\"html.parser\")\n",
    "    context = soup.findAll(class_=\"b_lineclamp4 b_algoSlug\")\n",
    "    print('1111115')\n",
    "    print(context)\n",
    "    print('1111116')\n",
    "    results = \"\"\n",
    "    for i in range(len(context)):\n",
    "        if '\\u2002·\\u2002' not in str(context[i]): continue\n",
    "        results += (str(i)+'）')\n",
    "        results += (str(context[i]).split('\\u2002·\\u2002')[1].replace('</p>',''))\n",
    "\n",
    "    # 返回soup, context用于debug，有时候results是空的，这是因为搜索失败导致的\n",
    "    return results, soup, context\n",
    "\n",
    "results, soup, context = get_bing_results(word)\n",
    "# print(soup)\n",
    "print(results)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "id": "1c4d8320-fe31-4881-94a5-97b89d3e8d66",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T07:37:26.694366Z",
     "iopub.status.busy": "2024-01-30T07:37:26.694046Z",
     "iopub.status.idle": "2024-01-30T07:37:27.690246Z",
     "shell.execute_reply": "2024-01-30T07:37:27.689803Z",
     "shell.execute_reply.started": "2024-01-30T07:37:26.694347Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "http://cn.bing.com/search?q=%E6%B5%B7%E5%B0%94%E9%9B%86%E5%9B%A2\n",
      "\n"
     ]
    }
   ],
   "source": [
    "results, soup, context = get_bing_results(\"海尔集团\")\n",
    "# print(soup)\n",
    "print(results)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "id": "ed4f2ea4-d511-4cab-b90c-6c919a02dd48",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T07:39:41.422960Z",
     "iopub.status.busy": "2024-01-30T07:39:41.422596Z",
     "iopub.status.idle": "2024-01-30T07:39:41.610336Z",
     "shell.execute_reply": "2024-01-30T07:39:41.609895Z",
     "shell.execute_reply.started": "2024-01-30T07:39:41.422939Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1111111\n",
      "http://baidu.com/search?q=%E4%BA%94%E6%9C%88%E5%A4%A92023%E5%B9%B4%E6%BC%94%E5%94%B1%E4%BC%9A%E9%97%A8%E7%A5%A8%E5%A4%9A%E5%B0%91%E9%92%B1\n",
      "1111112\n",
      "1111113\n",
      "1111114\n",
      "1111115\n",
      "[]\n",
      "1111116\n",
      "\n"
     ]
    }
   ],
   "source": [
    "import re,urllib.parse,urllib.request,urllib.error\n",
    "from bs4 import BeautifulSoup as BS\n",
    "\n",
    "# 搜索框中的搜索内容\n",
    "word = '五月天2023年演唱会门票多少钱'\n",
    "\n",
    "# 获取bing搜索的结果\n",
    "def get_bing_results(word):\n",
    "    baseUrl = 'http://baidu.com/search?'\n",
    "    word = word.encode(encoding='utf-8', errors='strict')\n",
    "\n",
    "    data = {'q':word}\n",
    "    data = urllib.parse.urlencode(data)\n",
    "    url = baseUrl+data\n",
    "    print('1111111')\n",
    "    print(url)\n",
    "\n",
    "    print('1111112')\n",
    "    try:\n",
    "        html = urllib.request.urlopen(url)\n",
    "        print('1111113')\n",
    "    except urllib.error.HTTPError as e:\n",
    "        print(e.code)\n",
    "    except urllib.error.URLError as e:\n",
    "        print(e.reason)\n",
    "\n",
    "    print('1111114')\n",
    "    # 解析html\n",
    "    soup = BS(html,\"html.parser\")\n",
    "    context = soup.findAll(class_=\"b_lineclamp4 b_algoSlug\")\n",
    "    print('1111115')\n",
    "    print(context)\n",
    "    print('1111116')\n",
    "    results = \"\"\n",
    "    for i in range(len(context)):\n",
    "        if '\\u2002·\\u2002' not in str(context[i]): continue\n",
    "        results += (str(i)+'）')\n",
    "        results += (str(context[i]).split('\\u2002·\\u2002')[1].replace('</p>',''))\n",
    "\n",
    "    # 返回soup, context用于debug，有时候results是空的，这是因为搜索失败导致的\n",
    "    return results, soup, context\n",
    "\n",
    "results, soup, context = get_bing_results(word)\n",
    "# print(soup)\n",
    "print(results)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "id": "ee14d231-4555-4864-bf3e-825030cec045",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T07:41:42.601165Z",
     "iopub.status.busy": "2024-01-30T07:41:42.600845Z",
     "iopub.status.idle": "2024-01-30T07:41:49.315373Z",
     "shell.execute_reply": "2024-01-30T07:41:49.314922Z",
     "shell.execute_reply.started": "2024-01-30T07:41:42.601147Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<Response [200]>\n",
      "None\n"
     ]
    }
   ],
   "source": [
    "import requests\n",
    "from bs4 import BeautifulSoup\n",
    " \n",
    "def get_baidu_search_result(keyword):\n",
    "    url = 'https://www.baidu.com/s'\n",
    "    params = {'wd': keyword}\n",
    "    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3'}\n",
    "    response = requests.get(url, params=params, headers=headers)\n",
    "    print(response)\n",
    "    soup = BeautifulSoup(response.text, 'html.parser')\n",
    "    results = soup.find_all('div', class_='result')\n",
    "    for result in results:\n",
    "        try:\n",
    "            title = result.h3.a.text\n",
    "            link = result.h3.a['href']\n",
    "            desc = result.find('div', class_='c-abstract').text\n",
    "            print(title)\n",
    "            print(link)\n",
    "            print(desc)\n",
    "        except:\n",
    "            pass\n",
    " \n",
    "if __name__ == '__main__':\n",
    "    keyword = 'Python'\n",
    "    res = get_baidu_search_result(keyword)\n",
    "    print(res)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "id": "913eba0c-7299-4ddf-95f7-edec245a32be",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T07:48:13.440616Z",
     "iopub.status.busy": "2024-01-30T07:48:13.440295Z",
     "iopub.status.idle": "2024-01-30T07:48:22.244729Z",
     "shell.execute_reply": "2024-01-30T07:48:22.244283Z",
     "shell.execute_reply.started": "2024-01-30T07:48:13.440597Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "Drugs.com - Prescription Drug Information\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "drug - 搜索 词典\n",
      "药物在线-快捷药物信息平台-DrugFuture数据在线\n",
      "Drug_百度百科\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=11&Accept-Language=en-us\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "Drugs.com - Prescription Drug Information\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "drug - 搜索 词典\n",
      "药物在线-快捷药物信息平台-DrugFuture数据在线\n",
      "Drug_百度百科\n",
      "Nature Reviews Drug Discovery\n",
      "Drug Office | 藥 物 辦 公 室\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=25&Accept-Language=en-us\n",
      "Home | Drugs - Springer\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "DrugBank Online | Database for Drug and Drug Target Info\n",
      "Nature Reviews Drug Discovery\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Automating drug discovery | Nature Reviews Drug Discovery\n",
      "Drug_百度百科\n",
      "Drugs | FDA - U.S. Food and Drug Administration\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=39&Accept-Language=en-us\n",
      "Home | Drugs - Springer\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "DrugBank Online | Database for Drug and Drug Target Info\n",
      "Nature Reviews Drug Discovery\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Automating drug discovery | Nature Reviews Drug Discovery\n",
      "Drug_百度百科\n",
      "Drugs | FDA - U.S. Food and Drug Administration\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=53&Accept-Language=en-us\n",
      "Home | Drugs - Springer\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "DrugBank Online | Database for Drug and Drug Target Info\n",
      "Nature Reviews Drug Discovery\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Automating drug discovery | Nature Reviews Drug Discovery\n",
      "Drug_百度百科\n",
      "Drugs | FDA - U.S. Food and Drug Administration\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=67&Accept-Language=en-us\n",
      "Home | Drugs - Springer\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "DrugBank Online | Database for Drug and Drug Target Info\n",
      "Nature Reviews Drug Discovery\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Automating drug discovery | Nature Reviews Drug Discovery\n",
      "Drug_百度百科\n",
      "Drugs | FDA - U.S. Food and Drug Administration\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=81&Accept-Language=en-us\n",
      "Home | Drugs - Springer\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "DrugBank Online | Database for Drug and Drug Target Info\n",
      "Nature Reviews Drug Discovery\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Automating drug discovery | Nature Reviews Drug Discovery\n",
      "Drug_百度百科\n",
      "Drugs | FDA - U.S. Food and Drug Administration\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=95&Accept-Language=en-us\n",
      "Home | Drugs - Springer\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "DrugBank Online | Database for Drug and Drug Target Info\n",
      "Nature Reviews Drug Discovery\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Automating drug discovery | Nature Reviews Drug Discovery\n",
      "Drug_百度百科\n",
      "Drugs | FDA - U.S. Food and Drug Administration\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=109&Accept-Language=en-us\n",
      "Home | Drugs - Springer\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "DrugBank Online | Database for Drug and Drug Target Info\n",
      "Nature Reviews Drug Discovery\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Automating drug discovery | Nature Reviews Drug Discovery\n",
      "Drug_百度百科\n",
      "Drugs | FDA - U.S. Food and Drug Administration\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=123&Accept-Language=en-us\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "Drugs.com - Prescription Drug Information\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "drug - 搜索 词典\n",
      "药物在线-快捷药物信息平台-DrugFuture数据在线\n",
      "Drug_百度百科\n",
      "Nature Reviews Drug Discovery\n",
      "Drug Office | 藥 物 辦 公 室\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=137&Accept-Language=en-us\n",
      "DRUG中文(简体)翻译：剑桥词典 - Cambridge Dictionary\n",
      "「drug」同时表示毒品和药品的表达，外国人不觉得很尴尬吗 ...\n",
      "Drug | Definition, Types, Interactions, Abuse, & Facts\n",
      "Drugs.com - Prescription Drug Information\n",
      "drug是什么意思_drug的翻译_音标_读音_用法_例句_爱词霸 ...\n",
      "drug - 搜索 词典\n",
      "药物在线-快捷药物信息平台-DrugFuture数据在线\n",
      "Drug_百度百科\n",
      "Nature Reviews Drug Discovery\n",
      "Drug Office | 藥 物 辦 公 室\n",
      "http://global.bing.com/search?q=drug&qs=bs&ajf=60&first=151&Accept-Language=en-us\n"
     ]
    }
   ],
   "source": [
    "# -*- coding:utf-8 -*-\n",
    "import requests\n",
    "from lxml import etree\n",
    "\n",
    "import sys\n",
    "# reload(sys)\n",
    "# sys.setdefaultencoding(\"utf-8\")\n",
    "\n",
    "def getfrombing(word):\n",
    "    \"\"\"Scrape Bing search-result titles for `word`.\n",
    "\n",
    "    Pages through Bing web-search results, printing every non-empty result\n",
    "    title plus the URL of each page fetched, and returns the collected\n",
    "    titles as a list of strings.\n",
    "    \"\"\"\n",
    "  #  url='http://global.bing.com/search?q='+word+'&Accept-Language=en-us%3bq%3d0.5%2cen%3bq%3d0.3&Accept-Encoding=gzip%2c+deflate%2c+compress&Accept=text%2fhtml%2capplication%2fxhtml%2bxml%2capplication%2fxml%3bq%3d0.9%2c*%2f*%3bq%3d0.8&User-Agent=Mozilla%2f5.0+(X11%3b+Ubuntu%3b+Linux+x86_64%3b+rv%3a22.0)+Gecko%2f20100101+Firefox%2f22.0&Connection=keep-alive&Cache-Control=max-age%3d0&setmkt=en-us&setlang=en-us&FORM=SECNEN'\n",
    "    url = 'http://global.bing.com/search?q='+word+'&qs=bs&ajf=60&first=1&Accept-Language=en-us'\n",
    "    # Collected result titles (NOTE: the name shadows the built-in `list`)\n",
    "    list = []\n",
    "    headers = {\n",
    "        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',\n",
    "        'Accept-Encoding': 'gzip, deflate, compress',\n",
    "        'Accept-Language': 'en-us;q=0.5,en;q=0.3',\n",
    "        'Cache-Control': 'max-age=0',\n",
    "        'Connection': 'keep-alive',\n",
    "        'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:22.0) Gecko/20100101 Firefox/22.0'\n",
    "    }\n",
    "\n",
    "    # Running value for the 'first' (result offset) URL parameter\n",
    "    flag0 = 3\n",
    "    # Number of result pages to crawl\n",
    "    for k in range(0,11):\n",
    "        path = etree.HTML(requests.get(url=url, headers=headers).content.decode('utf-8'))\n",
    "        # flag-1 = number of <li> result slots to inspect (fewer on page 1)\n",
    "        flag=15\n",
    "        if k == 0:\n",
    "            flag=9\n",
    "        for i in range(1,flag):\n",
    "            words=\"\"\n",
    "            # Concatenate all text fragments of the i-th result's title link\n",
    "            for j in path.xpath('//*[@id=\"b_results\"]/li[%d]/h2/a//text()'%i):\n",
    "                words+=j\n",
    "            if len(words)>0:\n",
    "                print(words)\n",
    "                list.append(words)\n",
    "                pass\n",
    "            pass\n",
    "        # Advance the offset and build the next page's URL\n",
    "        flag0+=flag-1\n",
    "        url='http://global.bing.com/search?q='+word+'&qs=bs&ajf=60&first='+str(flag0)+'&Accept-Language=en-us'\n",
    "        print(url)\n",
    "    return list\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    getfrombing('drug')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 43,
   "id": "398782d0-7bf9-4af8-8ebc-e5a5aeb41ae2",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T07:48:53.806867Z",
     "iopub.status.busy": "2024-01-30T07:48:53.806529Z",
     "iopub.status.idle": "2024-01-30T07:49:01.374955Z",
     "shell.execute_reply": "2024-01-30T07:49:01.374545Z",
     "shell.execute_reply.started": "2024-01-30T07:48:53.806846Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=11&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=25&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=39&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=53&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=67&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=81&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=95&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=109&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=123&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=137&Accept-Language=en-us\n",
      "http://global.bing.com/search?q=成都迅达光电有限公司深圳分公司&qs=bs&ajf=60&first=151&Accept-Language=en-us\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "[]"
      ]
     },
     "execution_count": 43,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Re-run the Bing scraper defined in the previous cell on a company name\n",
    "# (requires that cell to have been executed in this kernel session)\n",
    "getfrombing('成都迅达光电有限公司深圳分公司')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "id": "0680855f-6618-4708-af9c-5c036b23e797",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T07:58:10.787891Z",
     "iopub.status.busy": "2024-01-30T07:58:10.787573Z",
     "iopub.status.idle": "2024-01-30T07:58:12.398032Z",
     "shell.execute_reply": "2024-01-30T07:58:12.396429Z",
     "shell.execute_reply.started": "2024-01-30T07:58:10.787872Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "ename": "IndexError",
     "evalue": "list index out of range",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mIndexError\u001b[0m                                Traceback (most recent call last)",
      "Cell \u001b[0;32mIn[45], line 16\u001b[0m\n\u001b[1;32m     14\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m i \u001b[38;5;129;01min\u001b[39;00m \u001b[38;5;28mrange\u001b[39m(\u001b[38;5;241m10\u001b[39m):\n\u001b[1;32m     15\u001b[0m     r11 \u001b[38;5;241m=\u001b[39m r1[i]\u001b[38;5;241m.\u001b[39mxpath(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mstring(.)\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m---> 16\u001b[0m     r22 \u001b[38;5;241m=\u001b[39m \u001b[43mr2\u001b[49m\u001b[43m[\u001b[49m\u001b[43mi\u001b[49m\u001b[43m]\u001b[49m\u001b[38;5;241m.\u001b[39mxpath(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mstring(.)\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m     17\u001b[0m     r33 \u001b[38;5;241m=\u001b[39m r3[i]\n\u001b[1;32m     18\u001b[0m     \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mopen\u001b[39m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mok.txt\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124ma\u001b[39m\u001b[38;5;124m'\u001b[39m, encoding\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mutf-8\u001b[39m\u001b[38;5;124m'\u001b[39m) \u001b[38;5;28;01mas\u001b[39;00m c:\n",
      "\u001b[0;31mIndexError\u001b[0m: list index out of range"
     ]
    }
   ],
   "source": [
    "# Scrape one page of Baidu search results with lxml XPath, appending\n",
    "# title / abstract / link for each result to ok.txt as JSON lines.\n",
    "import json\n",
    "import requests\n",
    "from lxml import etree\n",
    "headers = {\n",
    "    \"User-Agent\":\"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.104 Safari/537.36\"\n",
    "}\n",
    "response = requests.get('https://www.baidu.com/s?wd=粮食&lm=1', headers=headers)\n",
    "r = response.text\n",
    "html = etree.HTML(r, etree.HTMLParser())\n",
    "r1 = html.xpath('//h3')  # result titles\n",
    "r2 = html.xpath('//*[@class=\"c-abstract\"]')  # result abstracts\n",
    "r3 = html.xpath('//*[@class=\"t\"]/a/@href')  # result links\n",
    "\n",
    "# BUGFIX: a fixed range(10) raised IndexError whenever r2/r3 matched fewer\n",
    "# than 10 nodes (the three XPaths are not guaranteed to align in length).\n",
    "# Only iterate over indices that exist in all three result lists.\n",
    "n_results = min(10, len(r1), len(r2), len(r3))\n",
    "for i in range(n_results):\n",
    "    r11 = r1[i].xpath('string(.)')\n",
    "    r22 = r2[i].xpath('string(.)')\n",
    "    r33 = r3[i]\n",
    "    # Append the three fields as JSON strings, one per line\n",
    "    with open('ok.txt', 'a', encoding='utf-8') as c:\n",
    "         c.write(json.dumps(r11,ensure_ascii=False) + '\\n')\n",
    "         c.write(json.dumps(r22, ensure_ascii=False) + '\\n')\n",
    "         c.write(json.dumps(r33, ensure_ascii=False) + '\\n')\n",
    "    print(r11, end='\\n')\n",
    "    print('------------------------')\n",
    "    print(r22, end='\\n')\n",
    "    print(r33)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 47,
   "id": "27bb1ad1-f026-4483-8dd8-cab631c94acc",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T07:59:11.282381Z",
     "iopub.status.busy": "2024-01-30T07:59:11.282067Z",
     "iopub.status.idle": "2024-01-30T07:59:15.322396Z",
     "shell.execute_reply": "2024-01-30T07:59:15.321947Z",
     "shell.execute_reply.started": "2024-01-30T07:59:11.282361Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<html>\n",
      "<head>\n",
      "\t<script>\n",
      "\t\tlocation.replace(location.href.replace(\"https://\",\"http://\"));\n",
      "\t</script>\n",
      "</head>\n",
      "<body>\n",
      "\t<noscript><meta http-equiv=\"refresh\" content=\"0;url=http://www.baidu.com/\"></noscript>\n",
      "</body>\n",
      "</html>\n"
     ]
    }
   ],
   "source": [
    "# Minimal request with no browser headers: Baidu answers with a small\n",
    "# JavaScript redirect stub (see the cell output) instead of real results.\n",
    "import requests\n",
    "\n",
    "url = 'https://www.baidu.com/s'\n",
    "params = {'wd': 'Python教程'}  # query string: wd=<keyword>\n",
    "response = requests.get(url, params=params)\n",
    "\n",
    "# Print the response body\n",
    "print(response.text)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 55,
   "id": "65b09ba1-f80d-4154-ac32-c945caa6d035",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-30T08:11:18.431412Z",
     "iopub.status.busy": "2024-01-30T08:11:18.431106Z",
     "iopub.status.idle": "2024-01-30T08:11:22.428260Z",
     "shell.execute_reply": "2024-01-30T08:11:22.427818Z",
     "shell.execute_reply.started": "2024-01-30T08:11:18.431394Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "开始爬取第1页\n",
      "开始等待1.148107294130934秒\n",
      "响应码是:200\n",
      "正在爬取:https://www.baidu.com/s?wd=海尔集团&pn=0,共查询到0个结果\n",
      "开始爬取第2页\n",
      "开始等待1.0925691923752792秒\n",
      "响应码是:200\n",
      "正在爬取:https://www.baidu.com/s?wd=海尔集团&pn=10,共查询到0个结果\n"
     ]
    }
   ],
   "source": [
    "import requests  # send HTTP requests\n",
    "from bs4 import BeautifulSoup  # parse the result pages\n",
    "import pandas as pd  # save data to CSV\n",
    "import os  # check whether the output file exists\n",
    "from time import sleep  # pause between requests\n",
    "import random  # randomised wait times\n",
    "import re  # extract URLs with regular expressions (not used in this cell)\n",
    "# Request headers disguised as a browser\n",
    "headers = {\n",
    "\t\"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36\",\n",
    "\t\"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9\",\n",
    "\t\"Accept-Language\": \"zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7\",\n",
    "\t\"Connection\": \"keep-alive\",\n",
    "\t\"Accept-Encoding\": \"gzip, deflate, br\",\n",
    "\t\"Host\": \"www.baidu.com\",\n",
    "\t# Replace with your own Cookie — SECURITY NOTE(review): this value embeds\n",
    "\t# live login/session tokens for a real account; do not commit or share it.\n",
    "\t\"Cookie\": '_xsrf=2|3d8a4662|1e1fb75af408ade84ade1c4fb7d48f3b|1706598465; help_csrf=FsG3QiV8JK4KsfmcUQB4HZB2JhZdPY%2FGNPWuSyuZzIUFNEGdyOcTiuxouRVEuIYXLLykFWDgSZWbBH4uaAeKStnyDFKeyo2H%2BlI7WI%2B6pcsecKRLxOExrDv40kQibwE1Pj4r18g6%2Fn4McPs7U56sZA%3D%3D; cr_token=1b93d112-5f6a-4c03-a24f-1e8ee1c7fe20; _samesite_flag_=true; cookie2=122de321115a2214d0f8678fe5105e25; t=a12853c0b7f29b23faba11d214ae4587; _tb_token_=3d3e575d54897; _hvn_login=6; csg=e4654531; aliyun_lang=zh; login_aliyunid_pk=1057232792513786; login_current_pk=1057232792513786; _bl_uid=Lwl2Rmdykkgdg2jkpgIym0UgI7sh; cna=5SfnHZf1KTMCAbZc/RstFAl3; l=fBxL5cxeTNi5uEK5BOfwPurza77OSIRAguPzaNbMi9fPOkfB5nnRW1UZsFY6C3GVF6-pR3W9-IcwBeYBc3K-nxvtGwBLE8DmndLHR35..; login_aliyunid=\"taobaolijian****\"; login_aliyunid_ticket=bigG2Cd4fWaCmBZHIzsgdZq64XXWQgyKFeuf0vpmV*s*CT58JlM_1t$w3_n$Y5xizvF1iPo1VBsPes0KyqUamvPYYe_GVY115pL_Eapof_BNTwUhTOoNC1ZBeeMfKJzxdnb95hYssNIZor6q7SCxRtGm0; login_aliyunid_csrf=_csrf_tk_1710906598465160; login_aliyunid_pks=\"BG+JvULFgaELn+vVHYqhRd4cbutebNll5rVakKuw+WXBv0=\"; hssid=11e92116-b2c4-4574-a8bc-946086a23a1b; hsite=6; aliyun_country=CN; aliyun_site=CN; tfstk=epBezAmvLLbsvXnfTCvy_DUpFk9pFKUbrTTWqgjkAeY3yeglQZbrF6MQ98XNrNOhPWISz7bdWXOuyWKkUF7yFMGBA_5NrNdB-zLIrO-yDvDH-Bba4a_W-Y6P2Y-lqaEpPkF1vMpJErafU-sdvf0Ag9yXysBZIL4blRGsjBUBE6gcossn9Ci8LSA4ngLGQIY1LKlhXFkoUmKetE2v7YDlQHSwoMVxEYXwYBSyy4KGtpXRT4cythKwllrZ2kq7b8Ilonc-wBEpbEZHtbhJthKwllrZwbdLvh8bxBf..; isg=BHBwrvg44cuKZ7tuzkT84ImGQTjCuVQDVsoChWrBPEueJRDPEskkk8YXfS1FtQzb'}\n",
    "\n",
    "v_max_page = 2\n",
    "v_keyword = '海尔集团'\n",
    "# Fetch each page of search results\n",
    "for page in range(v_max_page):\n",
    "\tprint('开始爬取第{}页'.format(page + 1))\n",
    "\twait_seconds = random.uniform(1, 2)  # wait duration in seconds\n",
    "\tprint('开始等待{}秒'.format(wait_seconds))\n",
    "\tsleep(wait_seconds)  # random pause between requests\n",
    "\turl = 'https://www.baidu.com/s?wd=' + v_keyword + '&pn=' + str(page * 10)\n",
    "\tr = requests.get(url, headers=headers)\n",
    "\thtml = r.text\n",
    "\tprint('响应码是:{}'.format(r.status_code))\n",
    "\tsoup = BeautifulSoup(html, 'html.parser')\n",
    "\tresult_list = soup.find_all(class_='result c-container new-pmd')\n",
    "\tprint('正在爬取:{},共查询到{}个结果'.format(url, len(result_list)))\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 56,
   "id": "54ddeeae-8c1b-4708-89af-aa4c3e68234f",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-30T08:13:09.509784Z",
     "iopub.status.busy": "2024-01-30T08:13:09.509288Z",
     "iopub.status.idle": "2024-01-30T08:13:37.882645Z",
     "shell.execute_reply": "2024-01-30T08:13:37.882163Z",
     "shell.execute_reply.started": "2024-01-30T08:13:09.509764Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "开始爬取第1页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=0，共查询到10个结果\n",
      "title is:  1号线徐家汇站遇“紧急情况”?上海地铁:列车车顶瞬间烟雾,...\n",
      "title is:  车厢突然冒烟起火?广州地铁最新回应\n",
      "title is:  上海地铁着火?官方回应来了!_澎湃号·政务_澎湃新闻-The P...\n",
      "title is:  一车厢突然冒烟起火?广州地铁回应来了→\n",
      "title is:  重庆地铁回应地铁行驶途中车厢外迸出火花:设备故障,已维修\n",
      "title is:  突发!上海地铁一号线一列车车顶着火?官方通报来了～\n",
      "title is:  网传一地“地铁车厢起火冒烟”,官方回应\n",
      "title is:  深圳地铁4号线发生火灾?官方公布起火原因,及时疏散无人受伤\n",
      "title is:  广州地铁车厢突然冒烟起火! 官方出面回应, 乘客携带充电宝...\n",
      "title is:  车顶突然冒烟!上海地铁1号线徐家汇站发生火灾!现场曝光!\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第2页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=10，共查询到10个结果\n",
      "title is:  重庆轨道2号线突发闪爆,运营方回应_社会新闻_大众网\n",
      "title is:  上海地铁11号线今晨故障,现场有烟雾火光,两列列车迫停!亲...\n",
      "title is:  车顶突然冒烟!上海地铁1号线徐家汇站发生火灾!现场曝光!_...\n",
      "title is:  上海地铁11号线因短路出现火光?后方列车乘客还原现场情况\n",
      "title is:  网传深圳地铁着火,官方回应\n",
      "title is:  突发故障!乘客被困地铁车厢40分钟后,砸玻璃换气_腾讯新闻\n",
      "title is:  地铁火灾发生原因分析_哈尔滨名岛职业技能培训学校\n",
      "title is:  突发!北约克地铁爆炸!车厢大火浓烟!有人重伤!_Pegg_火灾_...\n",
      "title is:  沪一地铁站口连连起火!罪魁祸首找到了!\n",
      "title is:  广州一地铁车厢突发火情!又是充电宝惹的祸→\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第3页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=20，共查询到10个结果\n",
      "title is:  深圳地铁4号线着火?官方回应来了\n",
      "title is:  北京地铁一号线鲜为人知的往事,列车带病运行引发火灾致6人...\n",
      "title is:  沪一地铁站口连连起火!罪魁祸首找到了!\n",
      "title is:  地铁车厢起火事故应急预案 - 百度文库\n",
      "title is:  地铁火灾事故的成因 - 百度文库\n",
      "title is:  上海地铁11号线列车因短路疑似起火?乘客还原现场情况\n",
      "title is:  突发! 重庆发生安全事件, 地铁闪爆, 乘客砸窗自救, 更多内...\n",
      "title is:  突发!今晨上海地铁11号线发生故障!不断有电光和火星冒出!...\n",
      "title is:  轨道交通起火怎么办?逃生自救看这里!\n",
      "title is:  地铁发生故障、发生火灾怎么办?\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第4页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=30，共查询到10个结果\n",
      "title is:  突发!今晨上海地铁11号线发生故障!不断有电光和火星冒出!...\n",
      "title is:  突发故障!乘客被困地铁车厢40分钟后,砸玻璃换气_腾讯新闻\n",
      "title is:  轨道交通起火怎么办?逃生自救看这里!\n",
      "title is:  地铁发生故障、发生火灾怎么办?\n",
      "title is:  地铁火灾事故应急预案 - 百度文库\n",
      "title is:  智利地铁一节车厢电气故障连续爆炸冒烟 乘客尖叫奔逃_腾讯...\n",
      "title is:  上海地铁11号线突发故障起火地铁应急早先知-世展网\n",
      "title is:  一车厢突然冒烟起火!广州地铁回应→\n",
      "title is:  一车厢突然冒烟起火!广州地铁回应→\n",
      "title is:  车厢突然冒烟起火?广州地铁最新回应\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第5页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=40，共查询到10个结果\n",
      "title is:  上海地铁1号线徐家汇站列车着火?官方回应\n",
      "title is:  上海地铁1号线徐家汇站列车着火?官方回应!\n",
      "title is:  广州地铁一车厢突然冒烟起火,又是它惹的祸\n",
      "title is:  上海轨交11号线一列车冒火光?刚刚,官方回应了!\n",
      "title is:  上海地铁11号线今晨故障,现场有烟雾火光,两列列车迫停!亲...\n",
      "title is:  车顶突然冒烟!上海地铁1号线徐家汇站发生火灾!现场曝光!_...\n",
      "title is:  北京地铁一号线鲜为人知的往事,列车带病运行引发火灾致6人...\n",
      "title is:  上海地铁起火事件通报! - 知乎\n",
      "title is:  上海地铁车顶冒火光浓烟,官方通报:触网故障引发 | 潇湘晨...\n",
      "title is:  智利地铁一节车厢电气故障连续爆炸冒烟 乘客尖叫奔逃__财...\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第6页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=50，共查询到9个结果\n",
      "title is:  视频|智利地铁一车厢电气故障爆炸冒烟 乘客尖叫奔逃__财经...\n",
      "title is:  智利:地铁一车厢电气故障爆炸冒烟 乘客尖叫奔逃_故障_电气...\n",
      "title is:  午夜,北京西站地铁突然\"故障起火\" ..._灭火\n",
      "title is:  【网络辟谣】网传深圳地铁着火,官方回应\n",
      "title is:  惊魂一刻!青岛地铁11号线附近一大罐车突然起火→|火灾|逃...\n",
      "title is:  智利地铁一节车厢电气故障连续爆炸冒烟 乘客尖叫奔逃__财...\n",
      "title is:  “莫斯科地铁着火,警醒我国留学生安全意识”「环俄留学」\n",
      "title is:  ...青岛地铁11号线附近一大罐车突然起火→|大罐车|火灾|设...\n",
      "title is:  2023年地铁火灾事故应急预案(14篇) - 范文网\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第7页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=60，共查询到10个结果\n",
      "title is:  浅析地铁列车火灾处置对策 - 百度文库\n",
      "title is:  乘客背包突然冒烟 武汉地铁回应:手机电池故障所致--快科技...\n",
      "title is:  深挖| 香港地铁纵火案始末\n",
      "title is:  【谣言粉碎机】网传深圳地铁着火,官方回应\n",
      "title is:  地铁火灾的危险性有哪些-地铁火灾的特点有哪些-勤学培训网\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  突发!上海地铁故障伴有烟雾和火光|车站_网易订阅\n",
      "title is:  11号线列车出现烟雾火光?上海地铁:为出库无人列车,未引发...\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第8页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=70，共查询到10个结果\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  惊魂一刻!青岛地铁11号线附近一大罐车突然起火→\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  突发!上海地铁故障伴有烟雾和火光|车站_网易订阅\n",
      "title is:  1969年北京地铁火灾事故\n",
      "title is:  伦敦地铁站超级火灾,蔓延速度匪夷所思,科学家:英国人不作...\n",
      "title is:  法国:巴黎一地铁站因电路故障起火_手机新浪网\n",
      "title is:  城市轨道火灾事故应急处理-20231128140732.docx-原创力文档\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第9页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=80，共查询到9个结果\n",
      "title is:  英国:伦敦一地铁起火 系电气故障|24小时_新浪视频\n",
      "title is:  ...号线触网故障引发一列出库无人列车瞬间短路拉弧,并未引...\n",
      "title is:  上海地铁11号线早高峰崩出“火光”,官方:车上无乘客线路已...\n",
      "title is:  城市轨道交通列车在高架、隧道区间时发生火灾的安全管理与...\n",
      "title is:  法国巴黎一地铁站因电路故障起火 致2人轻伤_新闻频道_央视...\n",
      "title is:  102人骨折,地铁列车追尾原因初步查明!安全提示_澎湃号·政...\n",
      "title is:  法国:巴黎一地铁站因电路故障起火_新闻频道_央视网(cctv.c...\n",
      "title is:  西安地铁线缆着火(西安地铁电缆事件死亡人数) - 河南出行网\n",
      "title is:  贵阳地铁3号线开通初期运营前综合应急救援演练举行-贵阳网\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第10页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=90，共查询到8个结果\n",
      "title is:  地铁火灾事故分析与消防安全对策.doc\n",
      "title is:  地铁站火灾应急演练 - 百度文库\n",
      "title is:  上海地铁发生爆炸?官方:系接触网跳闸致照明断电冒烟_运营\n",
      "title is:  太惊险!北京地铁7号线,充电宝突然爆炸,导致乘客全部下车 -...\n",
      "title is:  地铁事故应急处理方案 - 百度文库\n",
      "title is:  突发!10号线又双叒叕坏了!_列车运行_北京_故障\n",
      "title is:  城市地铁交通事故救援演练方案 - 百度文库\n",
      "title is:  北京地铁10号线一列车发生故障,致部分列车延误或临时停车_...\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第11页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=100，共查询到10个结果\n",
      "title is:  广州地铁故障(广州地铁故障率排名) - 快乐出行\n",
      "title is:  上海地铁爆炸?官方:系接触网跳闸引起车厢照明失电和冒烟\n",
      "title is:  地铁事故的应急处理与维修 - 百度文库\n",
      "title is:  地铁八号线车厢冒烟起火花 乘客解锁车门隧道逃生_房产资讯...\n",
      "title is:  上海“地铁2号线爆炸”系谣言 接触网跳闸致车厢照明断电冒...\n",
      "title is:  地铁八号线车厢冒烟起火花 乘客解锁车门隧道逃生_房产资讯...\n",
      "title is:  昆明地铁首期工程火灾报警系统发生火灾报警与火灾故障处置...\n",
      "title is:  我在党的诞生地|黄浦消防开展地铁站点大型灭火救援演练\n",
      "title is:  上海地铁爆炸?官方:系接触网跳闸引起车厢照明失电和冒烟\n",
      "title is:  ...商场内烟雾弥漫?啥情况…|电梯|地铁|梯级|上海市|自动...\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第12页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=110，共查询到9个结果\n",
      "title is:  地铁土城站出现电路故障 车上乘客被紧急疏散-新闻中心-北...\n",
      "title is:  地铁消防安全培训 - 百度文库\n",
      "title is:  城市轨道突发事件案例十篇\n",
      "title is:  地铁事故的应急处理模版课件 - 百度文库\n",
      "title is:  地铁运营中的火灾原因及预防措施研究 - 百度学术\n",
      "title is:  亲历!1号线断电惊魂,广州地铁步步惊人…\n",
      "title is:  两车厢分离,致30余人受伤!北京地铁深夜致歉,救援画面曝光\n",
      "title is:  起火最新资讯_起火最新动态_iPod起火\n",
      "title is:  重庆5号线,凤西路-巴山站,地铁出故障!吓人_网易视频\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第13页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=120，共查询到10个结果\n",
      "title is:  地铁事故精选(九篇)\n",
      "title is:  南京地铁1号线发生多起故障 官博致歉:乘客可到票亭领取致...\n",
      "title is:  上海一地铁站出口电瓶车突自燃 致10多辆车被烧毁 _徐汇区\n",
      "title is:  深圳地铁12号线突发故障,车厢剧烈抖动,乘客从隧道紧急撤离\n",
      "title is:  突发!北京地铁昌平线发生故障,乘客:车厢断开,有人受伤,大...\n",
      "title is:  广州地铁5号线异常事件:一男子紧急情况下车厢内解决个人问...\n",
      "title is:  广州地铁5号线今早突发故障 有乘客滞留-荔枝网\n",
      "title is:  广州地铁5号线今早突发故障 有乘客滞留-荔枝网\n",
      "title is:  全国热搜丨上海地铁回应11号线故障-荔枝网\n",
      "title is:  今早地铁上新!中重度污染!全面禁燃不合法?!北京禁止“老头...\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第14页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=130，共查询到10个结果\n",
      "title is:  早读| 今早地铁上新!中重度污染!全面禁燃不合法?!北京禁止...\n",
      "title is:  青岛地铁5号线发生窒息死亡事故 直接经济损失约226.7万元\n",
      "title is:  一场意外失火,让我们认识了一位“百变”地铁青年-新闻-青...\n",
      "title is:  地铁供电设备差动保护跳闸技术研究丘伟泽 - 百度...\n",
      "title is:  官方公布!青岛一在建地铁站发生事故,致1人死亡|钻机|维修|...\n",
      "title is:  广州:汽车冒烟着火后 车主称理赔难-荔枝网\n",
      "title is:  地铁宋家庄站一扶梯故障,维修人员称一名乘客脚被夹伤\n",
      "title is:  地铁10号线早间列车故障致延误,最新通报来了\n",
      "title is:  面包车撞桥墩起火 热心市民冒险救人-荔枝网\n",
      "title is:  深圳地铁4号线着火,竟是手机“惹祸”!_自燃_充电_电池\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第15页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=140，共查询到10个结果\n",
      "title is:  广东一地铁车厢突然冒烟起火?最新回应→_充电_乘客_爆炸\n",
      "title is:  又是它!一地铁车厢起火冒烟,乘客纷纷避让……_充电_移动电...\n",
      "title is:  午夜,北京西站地铁突然“故障起火” 原来是在演练地下火灾...\n",
      "title is:  冰箱故障排查 冰箱短路!广州北京路地铁站便利店起火 已排...\n",
      "title is:  102人骨折,地铁列车追尾原因初步查明!安全提示_乘客_火灾_...\n",
      "title is:  102人骨折,地铁列车追尾原因初步查明!安全提示_乘客_火灾_...\n",
      "title is:  102人骨折,地铁列车追尾原因初步查明!安全提示_乘客_火灾_...\n",
      "title is:  102人骨折,地铁列车追尾原因初步查明!_乘客_火灾_疏散\n",
      "title is:  上海“地铁2号线爆炸”系谣言 接触网跳闸致车厢照明断电冒...\n",
      "title is:  深圳地铁电缆起火致多趟列车停运 无人伤亡(图)-搜狐新闻\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第16页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=150，共查询到10个结果\n",
      "title is:  浅谈电气火灾云系统在地铁中的应用分析_仪表网\n",
      "title is:  突发!广州地铁二号线冒烟起火\n",
      "title is:  车顶突然冒烟!上海地铁1号线徐家汇站发生火灾!现场曝光!_...\n",
      "title is:  突发!北京地铁昌平线发生严重故障,车厢断开,多人受伤,乘客...\n",
      "title is:  上海地铁突发故障,车顶冒火光浓烟,车上是否有乘客出现争议\n",
      "title is:  广州地铁二号线车厢冒烟起火,现场恐慌混乱,但无人伤亡\n",
      "title is:  1号线徐家汇站遇“紧急情况”?上海地铁:列车车顶瞬间烟雾,...\n",
      "title is:  南京地铁早高峰连发3起故障,乘客领到致歉信,官方:将加强设...\n",
      "title is:  地铁火灾事故案例合集 - 百度文库\n",
      "title is:  一周事故及安全警示(2022年第34期)_黄山市应急管理局\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第17页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=160，共查询到10个结果\n",
      "title is:  今早地铁站着火了?真相原来是这样……_城生活_新民网\n",
      "title is:  地铁火灾场景设计探讨 - 控制工程网-全球工控自动化和智能...\n",
      "title is:  广州一地铁站附近起火 现场火势凶猛冒百米浓烟_手机新浪网\n",
      "title is:  北京13号线地铁轨道高架桥下方仓库起火爆燃视频-闽南网\n",
      "title is:  广州地铁一车厢内冒烟起火,这种物品使用要谨慎→\n",
      "title is:  北京地铁6号线有过跑太快着火冒烟散架吗? - 百度知道\n",
      "title is:  北京地铁6号线二期工地起火 无人伤亡 ‹ 国内 ‹ 门户 ‹...\n",
      "title is:  北京地铁事故汇总版\n",
      "title is:  关于地铁2号线连续发生着火事故的通报西安市人民政府\n",
      "title is:  北京地铁13号线因高架桥下起大火被“截断”_中国发展门户...\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第18页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=170，共查询到10个结果\n",
      "title is:  1号线突发故障!地铁应急知识请收下 - 上海科普网\n",
      "title is:  地铁模拟电器故障导致火灾 百余“乘客”安全撤离|乘客|地...\n",
      "title is:  地铁火灾事故案例(2015-12-26更新)\n",
      "title is:  成都市地铁2号线运营交通故障成因分析与应急对策 |\n",
      "title is:  北京地铁起火原因查明 系风扇线路短路导致(图)--青岛新闻网\n",
      "title is:  为何带故障运行四站 北京地铁列车着火引发反思_新闻中心_...\n",
      "title is:  昆明地铁五号线在建工地起火乘客被困 已扑灭\n",
      "title is:  上海地铁 11 号线触网故障,现场伴有烟雾和火光,并未引发火...\n",
      "title is:  地铁火灾事故应急预案精选5篇,地铁火灾应急处理预案-经验本\n",
      "title is:  地铁火灾的应急预案(通用8篇)\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第19页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=180，共查询到10个结果\n",
      "title is:  上海地铁2号线故障仍在抢修 官方否认起火或撞车-闽南网\n",
      "title is:  如遇地铁列车发生火灾,应这样应急!\n",
      "title is:  上海地铁1号线徐家汇站列车着火?官方回应_央广网\n",
      "title is:  2022年地铁火灾事故的成因国内地铁火灾事故范文 - 豆丁网\n",
      "title is:  简述地铁发生火灾的主要原因 - 百度知道\n",
      "title is:  上海地铁2号线故障5小时逐步恢复,地铁方否认起火撞车传言_...\n",
      "title is:  上海地铁3号线一列车“冒烟” 运营方:触网故障-中新网\n",
      "title is:  乘客背包突然冒烟 武汉地铁回应:手机电池故障所致\n",
      "title is:  地铁车站火灾应急预案(通用3篇)\n",
      "title is:  国内外城市地铁运营安全事故和故障实例 - 豆丁网\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n",
      "开始爬取第20页\n",
      "正在读取：https://www.baidu.com/s?&wd=地铁故障起火&pn=190，共查询到10个结果\n",
      "title is:  [地铁火灾事故的成因]阿塞拜疆巴库地铁火灾 - 豆丁网\n",
      "title is:  盘点中外历史上的重大伤亡地铁火灾\n",
      "title is:  北京地铁列车着火事故引发四大反思_新闻中心_新浪网\n",
      "title is:  上海地铁致歉!11号线触网故障情况说明:未引发火灾,无人员...\n",
      "title is:  纽约曼哈顿地铁因刹车故障造成两节车厢脱轨 致800人被困34...\n",
      "title is:  上海地铁2号线故障仍在抢修 官方否认起火或撞车-闽南网\n",
      "title is:  沈阳地铁表示列车延误和起火无关 是信号故障导致-房产新闻...\n",
      "title is:  地铁火灾的原因与统计浅析\n",
      "title is:  地铁火灾的应急预案(精选12篇)\n",
      "title is:  地铁火灾事故分析解析\n",
      "结果保存成功：百度爬虫地铁故障起火_前20页.csv\n"
     ]
    }
   ],
   "source": [
    "import os\n",
    "import random  \n",
    "import time\n",
    "import pandas as pd  \n",
    "import requests \n",
    "from bs4 import BeautifulSoup\n",
    "\n",
    "# Request headers disguised as a browser; the Cookie is account-specific.\n",
    "headers = {\n",
    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36 Edg/109.0.1518.70\",\n",
    "    \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9\",\n",
    "    \"Accept-Language\": \"zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\",\n",
    "    \"Connection\": \"keep-alive\",\n",
    "    \"Accept-Encoding\": \"gzip, deflate\",\n",
    "    \"Host\": \"www.baidu.com\",\n",
    "    \"Cookie\": \"BIDUPSID=E5DC3B4CB152A27DBF1D270E3503794B; PSTM=167872138 ZFY=DqhB1QpFu:APeJVy:AOeNNGsu1YREtMxYZgrqntwJNQlE:C; delPer=0; BD1\"\n",
    "}\n",
    "def baidu_search(v_keyword, v_result_file, v_max_page):\n",
    "    \"\"\"Crawl Baidu search-result pages for a keyword and append them to a CSV.\n",
    "\n",
    "    :param v_keyword: search keyword\n",
    "    :param v_result_file: output CSV file name (appended to once per page)\n",
    "    :param v_max_page: number of result pages to crawl\n",
    "    :return: None\n",
    "    \"\"\"\n",
    "    for page in range(v_max_page):\n",
    "        print('开始爬取第{}页'.format(page + 1))\n",
    "        # wait_seconds = random.uniform(5, 10)\n",
    "        # print('开始等待{}秒'.format(wait_seconds))\n",
    "        # time.sleep(wait_seconds)\n",
    "        url = 'https://www.baidu.com/s?&wd=' + v_keyword + '&pn=' + str(page * 10)\n",
    "        r = requests.get(url, headers=headers)\n",
    "        html = r.text\n",
    "        # Pass the parser explicitly: BeautifulSoup(html) with no parser\n",
    "        # emits GuessedAtParserWarning and may pick different parsers on\n",
    "        # different machines, giving inconsistent trees.\n",
    "        soup = BeautifulSoup(html, 'html.parser')\n",
    "        result_list = soup.find_all(class_='result c-container xpath-log new-pmd')\n",
    "        print('正在读取：{}，共查询到{}个结果'.format(url, len(result_list)))\n",
    "        kw_list = []\n",
    "        page_list = []\n",
    "        title_list = []\n",
    "        href_list = []\n",
    "        desc_list = []\n",
    "        site_list = []\n",
    "        for result in result_list:\n",
    "            title = result.find('a').text\n",
    "            print('title is: ', title)\n",
    "            href = result.find('a')['href']\n",
    "            # Abstract / site name can be absent on some result cards:\n",
    "            # find() returns None and .text raises AttributeError. Catch\n",
    "            # only that (the original bare `except:` swallowed everything).\n",
    "            try:\n",
    "                desc = result.find(class_=\"c-container\").text\n",
    "            except AttributeError:\n",
    "                desc = \"\"\n",
    "            try:\n",
    "                site = result.find(class_=\"c-color-gray\").text\n",
    "            except AttributeError:\n",
    "                site = \"\"\n",
    "            kw_list.append(v_keyword)\n",
    "            page_list.append(page + 1)\n",
    "            title_list.append(title)\n",
    "            href_list.append(href)\n",
    "            desc_list.append(desc)\n",
    "            site_list.append(site)\n",
    "        df = pd.DataFrame(\n",
    "            {\n",
    "                '关键词': kw_list,\n",
    "                '页码': page_list,\n",
    "                '标题': title_list,\n",
    "                '百度链接': href_list,\n",
    "                '简介': desc_list,\n",
    "                '网站名称': site_list,\n",
    "            }\n",
    "        )\n",
    "        # Write the header row only on the file's first write\n",
    "        if os.path.exists(v_result_file):\n",
    "            header = None\n",
    "        else:\n",
    "            header = ['关键词', '页码', '标题', '百度链接', '简介', '网站名称']\n",
    "        df.to_csv(v_result_file, mode='a+', index=False, header=header, encoding='utf_8_sig')\n",
    "        print('结果保存成功：{}'.format(v_result_file))\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    search_keyword = '地铁故障起火'\n",
    "    max_page = 20\n",
    "    result_file = '百度爬虫{}_前{}页.csv'.format(search_keyword, max_page)\n",
    "    # Start from a clean file so appended pages don't mix with old runs\n",
    "    if os.path.exists(result_file):\n",
    "        os.remove(result_file)\n",
    "        print('结果文件({})存在，已删除'.format(result_file))\n",
    "    baidu_search(search_keyword, result_file, max_page)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "aa7c94a3-d4c7-46d9-8137-5b37bd55ddbd",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-31T03:03:35.411172Z",
     "iopub.status.busy": "2024-01-31T03:03:35.410807Z",
     "iopub.status.idle": "2024-01-31T03:03:40.167006Z",
     "shell.execute_reply": "2024-01-31T03:03:40.166571Z",
     "shell.execute_reply.started": "2024-01-31T03:03:35.411150Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "结果文件(百度爬虫海尔集团_前2页.csv)存在，已删除\n",
      "开始爬取第1页\n",
      "开始等待1.7519690376217378秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=海尔集团&pn=0，共查询到6个结果\n",
      "title is:  海尔集团官网- 以无界生态共创无限可能\n",
      "title is:  海尔集团官网-物联网时代引领的生态品牌\n",
      "title is:  海尔官网-海尔智家 定制美好生活\n",
      "title is:  集团简介-海尔集团官网\n",
      "title is:  海尔生态-海尔集团官网\n",
      "title is:  海尔在全球-海尔集团官网\n",
      "结果保存成功：百度爬虫海尔集团_前2页.csv\n",
      "开始爬取第2页\n",
      "开始等待1.7289632048245471秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=海尔集团&pn=10，共查询到10个结果\n",
      "title is:  海尔公司概况-海尔官网\n",
      "title is:  品牌内涵-海尔官网\n",
      "title is:  海尔荣誉-海尔集团官网\n",
      "title is:  生态品牌 , 世界品牌的新动能-海尔集团官网\n",
      "title is:  海尔| 2022逆势增长,2023创业再出发!-海尔集团官网\n",
      "title is:  全行业解决方案  - 海尔商用解决方案-海尔官网\n",
      "title is:  发展历程-海尔集团官网\n",
      "title is:  海尔集团2023年工作总结表彰大会暨第八届职工代表大会第五...\n",
      "title is:  海尔集团2023业绩持续双增长\n",
      "title is:  海尔商用解决方案-海尔官网\n",
      "结果保存成功：百度爬虫海尔集团_前2页.csv\n"
     ]
    }
   ],
   "source": [
    "## https://cloud.tencent.com/developer/article/2322038\n",
    "\n",
    "import os\n",
    "import random  \n",
    "import time\n",
    "import pandas as pd  \n",
    "import requests \n",
    "from bs4 import BeautifulSoup\n",
    "\n",
    "headers = {\n",
    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36 Edg/109.0.1518.70\",\n",
    "    \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9\",\n",
    "    \"Accept-Language\": \"zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\",\n",
    "    \"Connection\": \"keep-alive\",\n",
    "    \"Accept-Encoding\": \"gzip, deflate\",\n",
    "    \"Host\": \"www.baidu.com\",\n",
    "    \"Cookie\": \"BIDUPSID=E5DC3B4CB152A27DBF1D270E3503794B; PSTM=167872138 ZFY=DqhB1QpFu:APeJVy:AOeNNGsu1YREtMxYZgrqntwJNQlE:C; delPer=0; BD1\"\n",
    "}\n",
    "def baidu_search(v_keyword, v_result_file, v_max_page):\n",
    "    \"\"\"\n",
    "      :param v_keyword: 搜索关键词\n",
    "      :param v_result_file: 保存文件名\n",
    "      :param v_max_page: 爬取前几页\n",
    "      :return:\n",
    "      \"\"\"\n",
    "    for page in range(v_max_page):\n",
    "        print('开始爬取第{}页'.format(page + 1))\n",
    "        wait_seconds = random.uniform(1, 3)\n",
    "        print('开始等待{}秒'.format(wait_seconds))\n",
    "        time.sleep(wait_seconds)\n",
    "        url = 'https://www.baidu.com/s?&wd=' + v_keyword + '&pn=' + str(page * 10)\n",
    "        r = requests.get(url, headers=headers)\n",
    "        html = r.text\n",
    "        print('ljtljt search——resul:{html}')\n",
    "        soup =  BeautifulSoup(html)\n",
    "        result_list = soup.find_all(class_='result c-container xpath-log new-pmd')\n",
    "        print('正在读取：{}，共查询到{}个结果'.format(url, len(result_list)))\n",
    "        kw_list = []\n",
    "        page_list = []\n",
    "        title_list = []\n",
    "        href_list = []\n",
    "        desc_list = []\n",
    "        site_list = []\n",
    "        for result in result_list:\n",
    "            title = result.find('a').text\n",
    "            print('title is: ', title)\n",
    "            href = result.find('a')['href']\n",
    "            try:\n",
    "                desc = result.find(class_=\"c-container\").text\n",
    "            except:\n",
    "                desc = \"\"\n",
    "            try:\n",
    "                site = result.find(class_=\"c-color-gray\").text\n",
    "            except:\n",
    "                site = \"\"\n",
    "            kw_list.append(v_keyword)\n",
    "            page_list.append(page + 1)\n",
    "            title_list.append(title)\n",
    "            href_list.append(href)\n",
    "            desc_list.append(desc)\n",
    "            site_list.append(site)\n",
    "        df = pd.DataFrame(\n",
    "            {\n",
    "                '关键词': kw_list,\n",
    "                '页码': page_list,\n",
    "                '标题': title_list,\n",
    "                '百度链接': href_list,\n",
    "                '简介': desc_list,\n",
    "                '网站名称': site_list,\n",
    "\n",
    "            }\n",
    "        )\n",
    "        if os.path.exists(v_result_file):\n",
    "            header = None\n",
    "        else:\n",
    "            header = ['关键词', '页码', '标题', '百度链接', '简介', '网站名称']\n",
    "        df.to_csv(v_result_file, mode='a+', index=False, header=header, encoding='utf_8_sig')\n",
    "        print('结果保存成功：{}'.format(v_result_file))\n",
    "    \n",
    "\n",
    "if __name__ == '__main__':\n",
    "    search_keyword = '海尔集团'\n",
    "    max_page = 2\n",
    "    result_file = '百度爬虫{}_前{}页.csv'.format(search_keyword, max_page)\n",
    "    if os.path.exists(result_file):\n",
    "        os.remove(result_file)\n",
    "        print('结果文件({})存在，已删除'.format(result_file))\n",
    "    baidu_search( search_keyword, result_file, max_page)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "70553f23-7325-4ade-b47a-a7a8dd993cdc",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-31T03:03:06.879926Z",
     "iopub.status.busy": "2024-01-31T03:03:06.879606Z",
     "iopub.status.idle": "2024-01-31T03:03:10.199308Z",
     "shell.execute_reply": "2024-01-31T03:03:10.198746Z",
     "shell.execute_reply.started": "2024-01-31T03:03:06.879907Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Looking in indexes: https://mirrors.aliyun.com/pypi/simple\n",
      "Collecting BeautifulSoup4\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl (147 kB)\n",
      "\u001b[2K     \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m147.9/147.9 kB\u001b[0m \u001b[31m11.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
      "\u001b[?25hCollecting soupsieve>1.2 (from BeautifulSoup4)\n",
      "  Downloading https://mirrors.aliyun.com/pypi/packages/4c/f3/038b302fdfbe3be7da016777069f26ceefe11a681055ea1f7817546508e3/soupsieve-2.5-py3-none-any.whl (36 kB)\n",
      "\u001b[33mDEPRECATION: omegaconf 2.0.6 has a non-standard dependency specifier PyYAML>=5.1.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of omegaconf or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0m\u001b[33mDEPRECATION: pytorch-lightning 1.7.7 has a non-standard dependency specifier torch>=1.9.*. pip 24.0 will enforce this behaviour change. A possible replacement is to upgrade to a newer version of pytorch-lightning or contact the author to suggest that they release a version with a conforming dependency specifiers. Discussion can be found at https://github.com/pypa/pip/issues/12063\u001b[0m\u001b[33m\n",
      "\u001b[0mInstalling collected packages: soupsieve, BeautifulSoup4\n",
      "Successfully installed BeautifulSoup4-4.12.3 soupsieve-2.5\n",
      "\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n",
      "\u001b[0m"
     ]
    }
   ],
   "source": [
    "!pip install BeautifulSoup4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "f14808ca-1be6-4d66-ab62-858cf444daad",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-31T03:08:18.702464Z",
     "iopub.status.busy": "2024-01-31T03:08:18.701958Z",
     "iopub.status.idle": "2024-01-31T03:09:35.342708Z",
     "shell.execute_reply": "2024-01-31T03:09:35.342221Z",
     "shell.execute_reply.started": "2024-01-31T03:08:18.702437Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "开始爬取第1页\n",
      "开始等待2.0996334908153003秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=三明市中油鸿圣油品销售有限公司&pn=0，共查询到9个结果\n",
      "title is:  三明市中油鸿圣油品销售有限公司怎么样 - 爱企查\n",
      "title is:  三明市中油鸿圣油品销售有限公司 - 企查查\n",
      "title is:  三明市中油鸿圣油品销售有限公司分支机构查询 - 天眼查\n",
      "title is:  三明市中油鸿圣油品销售有限公司明溪中业加油站 - 天眼查\n",
      "title is:  三圣加油站 - 企业/公司信息查询 - 爱企查\n",
      "title is:  陈世彪同名企业查询_陈世彪同名公司基本信息-企查猫(企业...\n",
      "title is:  三桶油\n",
      "title is:  电子商务企业信息查询-电子商务行业相关公司-电子商务企业...\n",
      "title is:  尤溪县应急管理局2023年行政许可信息公示(9月25日) _ 行政...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待1.916279986820523秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=三明市中油鸿圣油品销售有限公司&pn=10，共查询到9个结果\n",
      "title is:  ...年11月创新型中小企业拟推荐名单的公示 _ 通知公告 _ ...\n",
      "title is:  🥜油\n",
      "title is:  关于2022年度绿色企业、项目入库奖励拟发放企业名单的公示...\n",
      "title is:  三明市统计局2023年行政处罚信息公示 _ 文件公告 _ 三明市...\n",
      "title is:  三明市商务局成品油零售经营资格审批服务事项公告 _ 通知...\n",
      "title is:  郑崇浩 - 三明市中油嘉投油品销售有限公司法定代表人/股东...\n",
      "title is:  三明市市场监督管理局行政处罚决定书 _ 通知公告 _ 三明市...\n",
      "title is:  ...初级职务任职资格的通知 _ 最新文件 _ 三明市人力资源...\n",
      "title is:  三元区(北部)十二月份一至十五号“双随机一公开”监督抽查...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待2.193410131392974秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海三方半汽车科技有限公司&pn=0，共查询到9个结果\n",
      "title is:  上海三方半汽车科技有限公司 - 企查查\n",
      "title is:  上海三方半汽车科技有限公司联系方式-番番寻客宝\n",
      "title is:  上海三方半汽车科技有限公司是否有法律诉讼 - 启信宝\n",
      "title is:  全联三方汽车科技(上海)有限公司 - 爱企查\n",
      "title is:  上海三人行汽车科技有限公司-技术服务厂家\n",
      "title is:  上海三人行汽车科技有限公司 - 爱企查\n",
      "title is:  「车镇怎么样」深圳市易车合创科技有限公司 - 职友集\n",
      "title is:  悦达集团 - 同喜悦 共发达\n",
      "title is:  上海永茂泰汽车科技股份有限公司\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待1.7094967798833725秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海三方半汽车科技有限公司&pn=10，共查询到10个结果\n",
      "title is:  万马前程 - 万马前程公司 - 万马前程竞品公司信息 - 天眼查\n",
      "title is:  【大众汽车金融招聘】-猎聘\n",
      "title is:  汽车销售顾问(仰望)就业前景_比亚迪汽车工业2024年汽车销...\n",
      "title is:  上海领克汽车科技有限公司_上海领克汽车科技有限公司官网\n",
      "title is:  上海沿锋汽车科技股份有限公司【官网】\n",
      "title is:  一文读懂动力电池技术路线之争及中上游产业链(下)_财富号_...\n",
      "title is:  深市上市公司公告(10月12日)\n",
      "title is:  宏发科技股份有限公司_会议_报告_情况\n",
      "title is:  证券日报网-上海先惠自动化技术股份有限公司 关于自愿披露...\n",
      "title is:  上海新动力汽车科技股份有限公司2023年半年度报告摘要_手...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待1.625972670692334秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海中绿新能源科技有限公司&pn=0，共查询到7个结果\n",
      "title is:  上海中绿新能源科技有限公司 - 企查查\n",
      "title is:  上海中绿新能源科技有限公司 - 天眼查\n",
      "title is:  上海中绿新能源科技有限公司 - 单位一览\n",
      "title is:  上海中绿新能源科技有限公司\n",
      "title is:  上海上海中绿新能源科技公司-设备研发部-软件研发招聘-202...\n",
      "title is:  个股资金流向查询_个股行情_手机同花顺财经\n",
      "title is:  上海中绿新能源科技公司怎么样 - 百度知道\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待1.4994688451548466秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海中绿新能源科技有限公司&pn=10，共查询到10个结果\n",
      "title is:  「上海中绿优新能源科技有限公司招聘」-BOSS直聘\n",
      "title is:  【中绿】_中绿招聘-猎聘\n",
      "title is:  上海中能绿享新能源有限公司成立,注册资本1.00亿元人民币-...\n",
      "title is:  中绿集团官网\n",
      "title is:  上海中能绿享新能源有限公司成立,注册资本1.00亿元人民币_...\n",
      "title is:  中能绿科(上海)技术有限公司 - 爱企查\n",
      "title is:  市经济信息化委 市发展改革委关于公布2023年度(第二批)绿...\n",
      "title is:  ...惊涛骇浪”:八天七板龙头股办公地址位于浦东,一众央企...\n",
      "title is:  中能绿科(上海)技术有限公司_【工商信息_信用信息_纳税人...\n",
      "title is:  > 公示\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待2.4716754153655582秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海兆莹自控设备有限公司&pn=0，共查询到8个结果\n",
      "title is:  ...式有毒气体检测仪|硫化氢气体检测仪-上海兆莹自控设备...\n",
      "title is:  上海兆莹自控设备有限公司 - 主要人员 - 爱企查\n",
      "title is:  「上海兆莹自控设备有限公司招聘」-BOSS直聘\n",
      "title is:  气体检测仪_上海兆莹自控设备有限公司\n",
      "title is:  上海兆莹自控设备有限公司 - 企查查\n",
      "title is:  上海兆莹自控设备有限公司企业发展 - 企查查\n",
      "title is:  上海兆莹自控设备有限公司 - 企业资质证书查询 - 爱企查\n",
      "title is:  上海兆莹自控设备有限公司怎么样 - 百度知道\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待2.6322731084824476秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海兆莹自控设备有限公司&pn=10，共查询到10个结果\n",
      "title is:  上海兆莹自控设备有限公司\n",
      "title is:  sh-zhaoying.com的备案信息 - 站长工具\n",
      "title is:  气体检测仪安装工装_2021212212158_权利要求书_专利查询_...\n",
      "title is:  ...上海|闵行区|技术有限公司|设备有限公司|科技有限公司_...\n",
      "title is:  一种用于可燃气体的测爆仪专利_专利申请于2020-07-08_专利...\n",
      "title is:  (12)发明专利申请\n",
      "title is:  YK-P余压探测器-空气质量控制器-能耗\n",
      "title is:  2022年度上海市第四批拟认定高新技术企业名单公示 - 上海...\n",
      "title is:  「上海兆莹自控设备有限公司招聘」-BOSS直聘\n",
      "title is:  何李娜 - 上海兆莹自控设备有限公司 - 法定代表人/高管/股...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待1.8816186017487502秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海凡默谷信息技术有限公司&pn=0，共查询到8个结果\n",
      "title is:  上海凡默谷信息技术有限公司 - 企查查\n",
      "title is:  上海凡默谷信息技术有限公司 - 主要人员 - 爱企查\n",
      "title is:  上海凡默谷信息技术有限公司微信公众号查询 - 天眼查\n",
      "title is:  上海凡默谷信息技术有限公司怎么样 - 职友集\n",
      "title is:  上海凡默谷信息技术 - 上海凡默谷信息技术公司 - 上海凡默...\n",
      "title is:  上海凡默谷信息技术有限公司 (中国(上海)自由贸易试验区峨...\n",
      "title is:  上海凡默谷信息技术有限公司法律风险 - 企查查\n",
      "title is:  上海凡默谷信息技术有限公司_股东信息_名单_持股比例_企业...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待1.5931873331101118秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海凡默谷信息技术有限公司&pn=10，共查询到10个结果\n",
      "title is:  上海凡默谷信息技术有限公司 - 启信宝\n",
      "title is:  上海凡默谷信息技术有限公司-计算机厂家\n",
      "title is:  上海凡默谷信息技术有限公司\n",
      "title is:  【上海凡默谷信息技术有限公司招聘_上海-浦东新区招聘信息...\n",
      "title is:  上海凡默谷信息技术有限公司和泛微网络对比哪个好(规模排...\n",
      "title is:  【凡默谷信息技术上海】_凡默谷信息技术上海招聘-猎聘\n",
      "title is:  林翠芳 - 上海凡默谷信息技术有限公司 - 法定代表人/高管/...\n",
      "title is:  ​上海凡默谷信息技术有限公司\n",
      "title is:  上海凡默谷信息技术有限公司最新招聘信息|公司评价|职位动...\n",
      "title is:  ...生理学的口腔黏膜给药模拟软件_上海凡默谷信息技术有限...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待2.1220507474206秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海北半秋景观设计咨询有限公司&pn=0，共查询到8个结果\n",
      "title is:  上海北半秋景观设计咨询有限公司 - 企业/公司信息查询 - ...\n",
      "title is:  上海北半秋景观设计咨询有限公司 - 爱企查\n",
      "title is:  「上海北半秋景观设计咨询有限公司招聘」-BOSS直聘\n",
      "title is:  上海北半秋景观设计咨询有限公司-上海北半秋景观设计咨询...\n",
      "title is:  上海北半秋景观设计咨询有限公司怎么样 - 职友集\n",
      "title is:  上海北半秋景观设计咨询有限公司 - 企查查\n",
      "title is:  上海北半秋景观设计咨询有限公司 - 天眼查\n",
      "title is:  上海北半秋景观设计咨询有限公司企业信用报告-天...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待2.6701635956903655秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海北半秋景观设计咨询有限公司&pn=10，共查询到10个结果\n",
      "title is:  上海北半秋景观设计咨询有限公司_微信公众号_企业微信公众...\n",
      "title is:  上海北半秋景观设计咨询有限公司\n",
      "title is:  上海北半秋景观设计咨询有限公司招聘信息 - 筑招建筑人才网\n",
      "title is:  【上海北半秋景观设计咨询有限公司招聘|待遇|面试|怎么样...\n",
      "title is:  上海北半秋景观设计咨询有限公司怎么样 - 百度知道\n",
      "title is:  上海北半球景观设计咨询有限公司_找工作_景观人才网\n",
      "title is:  上海北半秋景观设计咨询有限公司简介-建筑英才网\n",
      "title is:  上海北半球景观设计咨询有限公司\n",
      "title is:  上海北半秋景观设计咨询有限公司联系方式-番番寻客宝\n",
      "title is:  招聘信息|招聘岗位|最新职位信息-智联招聘移动站\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待2.021023721962056秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海华电新浜能源有限公司&pn=0，共查询到9个结果\n",
      "title is:  上海华电新浜能源有限公司 - 天眼查\n",
      "title is:  上海嘉定华电新能发电有限公司 - 企查查\n",
      "title is:  华电新能源集团股份有限公司是国企吗 - 百度知道\n",
      "title is:  「上海嘉定华电新能发电有限公司招聘」-BOSS直聘\n",
      "title is:  华电能源 2.03(2.01%)_股票行情_新浪财经_新浪网\n",
      "title is:  上海嘉定华电新能发电有限公司 - 主要人员 - 爱企查\n",
      "title is:  华能新能源上海发电有限公司 - 天眼查\n",
      "title is:  华电新能源集团股份有限公司\n",
      "title is:  上海浦东华电新能新能源发展有限公司经营信息 - 企查查\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待2.720457802085913秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海华电新浜能源有限公司&pn=10，共查询到10个结果\n",
      "title is:  上海能源科技发展有限公司 - 天眼查\n",
      "title is:  中国华电集团\n",
      "title is:  中国华电集团有限公司-招聘信息\n",
      "title is:  华电新能源集团股份有限公司_招标采购平台-首页✅\n",
      "title is:  中国华电集团清洁能源有限公司\n",
      "title is:  上海浦东华电福新新能源有限公司-建设工程施工厂家\n",
      "title is:  华电江苏能源有限公司-要闻速递\n",
      "title is:  助力企业复工复产|中国华电全力保供能,助重点企业应急复工...\n",
      "title is:  上海能源(600508)_股票价格_行情_走势图—东方财富网\n",
      "title is:  上海浦东华电新能新能源发展有限公司 - 爱企查\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待1.7453092435605289秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海嘉能建业实业有限公司&pn=0，共查询到10个结果\n",
      "title is:  上海嘉能建业实业有限公司 - 企业/公司信息查询 - 爱企查\n",
      "title is:  上海嘉能建业实业有限公司 - 变更记录 - 爱企查\n",
      "title is:  上海嘉能建业实业有限公司最新企业年报 - 爱企查\n",
      "title is:  广东嘉能建筑有限公司_股东信息_名单_持股比例_企业查询 -...\n",
      "title is:  余华光 - 法定代表人/高管/股东 - 上海嘉能建业实业有限公...\n",
      "title is:  上海嘉能建业实业有限公司与上海禹康贸易有限公...\n",
      "title is:  北京产权交易所_嘉能公司推介基本资料\n",
      "title is:  关注| 27个项目,总投资额再超百亿!杨浦区又一批重大产业项...\n",
      "title is:  增值税防伪税控系统最高开票限额事项(2021年3月)\n",
      "title is:  总投资再次超百亿!27个重大产业项目集中落地杨浦_上海\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待2.6956153499328273秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海嘉能建业实业有限公司&pn=10，共查询到10个结果\n",
      "title is:  广东嘉能建筑有限公司 - 企查查\n",
      "title is:  上海建能建筑工程有限公司 - 企查查\n",
      "title is:  邝新华-企业工商信用信息,电话地址查询-阿里巴巴企业信用\n",
      "title is:  上海市2022年第3批入库科技型中小企业名单\n",
      "title is:  上海嘉控实业有限公司【官网】\n",
      "title is:  【上海南大开发建设有限公司招聘】-猎聘\n",
      "title is:  岳阳市人力资源服务中心-人事代理厂家\n",
      "title is:  关于拨付财政扶持资金的说明\n",
      "title is:  关注| 27个项目,总投资额再超百亿!杨浦区又一批重大产业项...\n",
      "title is:  27个项目,总投资额再超百亿!杨浦区又一批重大产业项目落地...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待1.5707553200166144秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海岩山科技股份有限公司&pn=0，共查询到7个结果\n",
      "title is:  岩山科技(sz002195)-新浪财经\n",
      "title is:  岩山科技(sz002195)-新浪财经\n",
      "title is:  岩山科技(002195)_股票价格_行情_走势图—东方财富网\n",
      "title is:  岩山科技(002195)资金流向_个股行情_同花顺财经\n",
      "title is:  岩山科技 2.59(-1.52%)_股票行情_新浪财经_新浪网\n",
      "title is:  上海岩山科技股份有限公司_核心团队_高管_核心人员_公司查...\n",
      "title is:  上海岩山科技股份有限公司 - 天眼查\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待2.5545862880803485秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海岩山科技股份有限公司&pn=10，共查询到10个结果\n",
      "title is:  「岩山科技招聘要求」上海岩山科技股份有限公司什么学历经...\n",
      "title is:  上海岩山科技股份有限公司-企业工商信用信息,电话,地址,法...\n",
      "title is:  上海岩山科技股份有限公司经营风险 - 企查查\n",
      "title is:  上海岩山科技股份有限公司-同花顺企业数据库\n",
      "title is:  上海岩山科技股份有限公司-企业可信百科-水滴信用\n",
      "title is:  公司公告_岩山科技:2023年度业绩预告新浪财经_新浪网\n",
      "title is:  岩山科技(002195)股吧_岩山科技怎么样_分析讨论社区—东方...\n",
      "title is:  公司资料 - 上海理工大学就业信息服务网\n",
      "title is:  [岩山科技]上市_上海岩山科技股份有限公司_投资界\n",
      "title is:  上海岩山科技股份有限公司2023年第四次临时股东大会决议公...\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第1页\n",
      "开始等待2.1810677629027584秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海工融战鑫私募基金合伙企业（有限合伙）&pn=0，共查询到9个结果\n",
      "title is:  个股资金流向查询_个股行情_手机同花顺财经\n",
      "title is:  工银资本联合中国电子设立投资基金 注册资本100亿元\n",
      "title is:  上海盛迪私募基金管理有限公司 - 天眼查\n",
      "title is:  私募基金管理人公示 - 中国基金业协会\n",
      "title is:  上海工融科创私募基金合伙企业(有限合伙) - 企查查\n",
      "title is:  上海钢联 - 词条页面_百科_东方财富网\n",
      "title is:  工银资本管理有限公司企业发展 - 企查查\n",
      "title is:  杨春宝等:2023年度私募基金行业31个典型判例(二)|股权|托...\n",
      "title is:  并购重组 _ 数据中心 _ 东方财富网\n",
      "结果保存成功：爬虫结果.csv\n",
      "开始爬取第2页\n",
      "开始等待2.041552561657629秒\n",
      "ljtljt search——resul:{html}\n",
      "正在读取：https://www.baidu.com/s?&wd=上海工融战鑫私募基金合伙企业（有限合伙）&pn=10，共查询到10个结果\n",
      "title is:  工银金融资产投资有限公司 - 天眼查\n",
      "title is:  股友E9510w7662_财富号_专业的财经自媒体平台\n",
      "title is:  上海钦沐资产管理合伙企业\n",
      "title is:  私募投资基金监督管理办法 (征求意见稿)\n",
      "title is:  上海鑫震私募基金管理有限公司 - 企查查\n",
      "title is:  优宁维(301166)-公司公告-优宁维:关于参与投资设立基金的...\n",
      "title is:  ...300677.SZ)公告,公司拟与上海融玺创业投资管理有限公司...\n",
      "title is:  ...参与投资海宁擎领创业投资基金合伙企业(有限合伙)的公...\n",
      "title is:  上海鲲鑫工融私募基金合伙企业(有限合伙) - 爱企查\n",
      "title is:  词条页面_百科_东方财富网\n",
      "结果保存成功：爬虫结果.csv\n"
     ]
    }
   ],
   "source": [
    "## https://cloud.tencent.com/developer/article/2322038\n",
    "\n",
    "import os\n",
    "import random  \n",
    "import time\n",
    "import pandas as pd  \n",
    "import requests \n",
    "from bs4 import BeautifulSoup\n",
    "\n",
    "headers = {\n",
    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36 Edg/109.0.1518.70\",\n",
    "    \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9\",\n",
    "    \"Accept-Language\": \"zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\",\n",
    "    \"Connection\": \"keep-alive\",\n",
    "    \"Accept-Encoding\": \"gzip, deflate\",\n",
    "    \"Host\": \"www.baidu.com\",\n",
    "    \"Cookie\": \"BIDUPSID=E5DC3B4CB152A27DBF1D270E3503794B; PSTM=167872138 ZFY=DqhB1QpFu:APeJVy:AOeNNGsu1YREtMxYZgrqntwJNQlE:C; delPer=0; BD1\"\n",
    "}\n",
    "def baidu_search(v_keyword, v_result_file, v_max_page):\n",
    "    \"\"\"\n",
    "      :param v_keyword: 搜索关键词\n",
    "      :param v_result_file: 保存文件名\n",
    "      :param v_max_page: 爬取前几页\n",
    "      :return:\n",
    "      \"\"\"\n",
    "    for page in range(v_max_page):\n",
    "        print('开始爬取第{}页'.format(page + 1))\n",
    "        wait_seconds = random.uniform(1, 3)\n",
    "        print('开始等待{}秒'.format(wait_seconds))\n",
    "        time.sleep(wait_seconds)\n",
    "        url = 'https://www.baidu.com/s?&wd=' + v_keyword + '&pn=' + str(page * 10)\n",
    "        r = requests.get(url, headers=headers)\n",
    "        html = r.text\n",
    "        print('ljtljt search——resul:{html}')\n",
    "        soup =  BeautifulSoup(html)\n",
    "        result_list = soup.find_all(class_='result c-container xpath-log new-pmd')\n",
    "        print('正在读取：{}，共查询到{}个结果'.format(url, len(result_list)))\n",
    "        kw_list = []\n",
    "        page_list = []\n",
    "        title_list = []\n",
    "        href_list = []\n",
    "        desc_list = []\n",
    "        site_list = []\n",
    "        for result in result_list:\n",
    "            title = result.find('a').text\n",
    "            print('title is: ', title)\n",
    "            href = result.find('a')['href']\n",
    "            try:\n",
    "                desc = result.find(class_=\"c-container\").text\n",
    "            except:\n",
    "                desc = \"\"\n",
    "            try:\n",
    "                site = result.find(class_=\"c-color-gray\").text\n",
    "            except:\n",
    "                site = \"\"\n",
    "            kw_list.append(v_keyword)\n",
    "            page_list.append(page + 1)\n",
    "            title_list.append(title)\n",
    "            href_list.append(href)\n",
    "            desc_list.append(desc)\n",
    "            site_list.append(site)\n",
    "        df = pd.DataFrame(\n",
    "            {\n",
    "                '关键词': kw_list,\n",
    "                '页码': page_list,\n",
    "                '标题': title_list,\n",
    "                '百度链接': href_list,\n",
    "                '简介': desc_list,\n",
    "                '网站名称': site_list,\n",
    "\n",
    "            }\n",
    "        )\n",
    "        if os.path.exists(v_result_file):\n",
    "            header = None\n",
    "        else:\n",
    "            header = ['关键词', '页码', '标题', '百度链接', '简介', '网站名称']\n",
    "        df.to_csv(v_result_file, mode='a+', index=False, header=header, encoding='utf_8_sig')\n",
    "        print('结果保存成功：{}'.format(v_result_file))\n",
    "    \n",
    "\n",
    "if __name__ == '__main__':\n",
    "    search_keyword_list = ['三明市中油鸿圣油品销售有限公司','上海三方半汽车科技有限公司','上海中绿新能源科技有限公司','上海兆莹自控设备有限公司','上海凡默谷信息技术有限公司','上海北半秋景观设计咨询有限公司','上海华电新浜能源有限公司','上海嘉能建业实业有限公司','上海岩山科技股份有限公司','上海工融战鑫私募基金合伙企业（有限合伙）']\n",
    "    max_page = 2\n",
    "    result_file = '爬虫结果.csv'\n",
    "    if os.path.exists(result_file):\n",
    "        os.remove(result_file)\n",
    "        print('结果文件({})存在，已删除'.format(result_file))\n",
    "    for search_keyword in search_keyword_list:\n",
    "        baidu_search( search_keyword, result_file, max_page)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c7f8500a-c443-4422-ab05-15887fe33de4",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "bdc11457-275c-4b9a-b923-206a4e8f9281",
   "metadata": {
    "execution": {
     "iopub.execute_input": "2024-01-31T03:53:42.199804Z",
     "iopub.status.busy": "2024-01-31T03:53:42.199486Z",
     "iopub.status.idle": "2024-01-31T03:53:42.323771Z",
     "shell.execute_reply": "2024-01-31T03:53:42.323161Z",
     "shell.execute_reply.started": "2024-01-31T03:53:42.199776Z"
    }
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<html>\n",
      "<head><title>403 Forbidden</title></head>\n",
      "<body>\n",
      "<center><h1>403 Forbidden</h1></center>\n",
      "<hr><center>nginx</center>\n",
      "</body>\n",
      "</html>\n",
      "\n"
     ]
    }
   ],
   "source": [
    "    url = 'https://icp.chinaz.com/上海凡默谷信息技术有限公司'\n",
    "    response = requests.get(url)\n",
    "    soup = BeautifulSoup(response.text, 'html.parser')\n",
    "    print(response.text)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "id": "06b75915-64e8-412d-a8ce-433f96258e80",
   "metadata": {
    "ExecutionIndicator": {
     "show": true
    },
    "execution": {
     "iopub.execute_input": "2024-01-31T03:56:32.336517Z",
     "iopub.status.busy": "2024-01-31T03:56:32.336207Z",
     "iopub.status.idle": "2024-01-31T03:56:32.444854Z",
     "shell.execute_reply": "2024-01-31T03:56:32.444396Z",
     "shell.execute_reply.started": "2024-01-31T03:56:32.336499Z"
    },
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "ljtljt search——resul:<html>\n",
      "<head><title>500 Internal Server Error</title></head>\n",
      "<body>\n",
      "<center><h1>500 Internal Server Error</h1></center>\n",
      "<hr><center>nginx</center>\n",
      "</body>\n",
      "</html>\n",
      "<!-- a padding to disable MSIE and Chrome friendly error page -->\n",
      "<!-- a padding to disable MSIE and Chrome friendly error page -->\n",
      "<!-- a padding to disable MSIE and Chrome friendly error page -->\n",
      "<!-- a padding to disable MSIE and Chrome friendly error page -->\n",
      "<!-- a padding to disable MSIE and Chrome friendly error page -->\n",
      "<!-- a padding to disable MSIE and Chrome friendly error page -->\n",
      "\n",
      "正在读取：https://icp.chinaz.com/上海凡默谷信息技术有限公司，共查询到0个结果\n"
     ]
    }
   ],
   "source": [
    "## https://cloud.tencent.com/developer/article/2322038\n",
    "\n",
    "import os\n",
    "import random  \n",
    "import time\n",
    "import pandas as pd  \n",
    "import requests \n",
    "from bs4 import BeautifulSoup\n",
    "\n",
    "headers = {\n",
    "    \"User-Agent\": \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36 Edg/109.0.1518.70\",\n",
    "    \"Accept\": \"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9\",\n",
    "    \"Accept-Language\": \"zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6\",\n",
    "    \"Connection\": \"keep-alive\",\n",
    "    \"Accept-Encoding\": \"gzip, deflate\",\n",
    "    \"Host\": \"www.baidu.com\",\n",
    "    \"Cookie\": \"BIDUPSID=E5DC3B4CB152A27DBF1D270E3503794B; PSTM=167872138 ZFY=DqhB1QpFu:APeJVy:AOeNNGsu1YREtMxYZgrqntwJNQlE:C; delPer=0; BD1\"\n",
    "}\n",
    "\n",
    "# url = 'https://www.baidu.com/s?&wd=' + v_keyword + '&pn=' + str(page * 10)\n",
    "url = 'https://icp.chinaz.com/上海凡默谷信息技术有限公司'\n",
    "r = requests.get(url, headers=headers)\n",
    "html = r.text\n",
    "print(f'ljtljt search——resul:{html}')\n",
    "soup =  BeautifulSoup(html)\n",
    "result_list = soup.find_all(class_='result c-container xpath-log new-pmd')\n",
    "print('正在读取：{}，共查询到{}个结果'.format(url, len(result_list)))\n",
    "\n",
    "\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c0715224-5ee9-4dc0-84a5-1897e15bed6b",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
