{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 6. 协程"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 6.1 协程与一般函数对比"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "crawling url_1\n",
      "done crawling url_1\n",
      "crawling url_2\n",
      "done crawling url_2\n",
      "crawling url_3\n",
      "done crawling url_3\n",
      "crawling url_4\n",
      "done crawling url_4\n",
      "CPU times: total: 0 ns\n",
      "Wall time: 10 s\n"
     ]
    }
   ],
   "source": [
    "import time\n",
    "\n",
    "def crawl_page(url):\n",
    "    \"\"\"Simulate crawling: sleep for the number of seconds encoded in the url suffix.\"\"\"\n",
    "    print(f\"crawling {url}\")\n",
    "    sleep_time = int(url.split('_')[-1])\n",
    "    time.sleep(sleep_time)\n",
    "    print(f\"done crawling {url}\")\n",
    "\n",
    "def main():\n",
    "    # Sequential baseline: total wall time = 1 + 2 + 3 + 4 = 10 s.\n",
    "    urls = ['url_1', 'url_2', 'url_3', 'url_4']\n",
    "    for url in urls:\n",
    "        crawl_page(url)\n",
    "\n",
    "\n",
    "%time main()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: nest_asyncio in d:\\myconfiguration\\yyf\\appdata\\roaming\\python\\python312\\site-packages (1.6.0)\n"
     ]
    }
   ],
   "source": [
    "# %pip (unlike !pip) always targets this kernel's environment.\n",
    "%pip install nest_asyncio"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "crawling url_1\n",
      "crawling url_2\n",
      "crawling url_3\n",
      "crawling url_4\n",
      "done crawling url_1\n",
      "done crawling url_2\n",
      "done crawling url_3\n",
      "done crawling url_4\n",
      "CPU times: total: 0 ns\n",
      "Wall time: 4.02 s\n",
      "crawling url_1\n",
      "crawling url_2\n",
      "crawling url_3\n",
      "crawling url_4\n",
      "done crawling url_1\n",
      "done crawling url_2\n",
      "done crawling url_3\n",
      "done crawling url_4\n",
      "CPU times: total: 0 ns\n",
      "Wall time: 4.01 s\n"
     ]
    }
   ],
   "source": [
    "import asyncio\n",
    "import nest_asyncio\n",
    "\n",
    "# Allow asyncio.run() inside Jupyter's already-running event loop.\n",
    "nest_asyncio.apply()\n",
    "\n",
    "async def crawl_page(url):\n",
    "    \"\"\"Async version: await asyncio.sleep so other coroutines can run meanwhile.\"\"\"\n",
    "    print(f\"crawling {url}\")\n",
    "    sleep_time = int(url.split('_')[-1])\n",
    "    await asyncio.sleep(sleep_time)\n",
    "    print(f\"done crawling {url}\")\n",
    "\n",
    "async def main():\n",
    "    # Awaiting each coroutine in turn is still sequential: ~10 s total.\n",
    "    urls = ['url_1', 'url_2', 'url_3', 'url_4']\n",
    "    for url in urls:\n",
    "        await crawl_page(url)\n",
    "\n",
    "async def main1():\n",
    "    # gather() schedules the coroutines concurrently: total = max sleep = ~4 s.\n",
    "    urls = ['url_1', 'url_2', 'url_3', 'url_4']\n",
    "    tasks = [crawl_page(url) for url in urls]\n",
    "    await asyncio.gather(*tasks)\n",
    "\n",
    "async def main2():\n",
    "    # Same concurrency via explicit tasks; create_task() starts them immediately.\n",
    "    urls = ['url_1', 'url_2', 'url_3', 'url_4']\n",
    "    tasks = [asyncio.create_task(crawl_page(url)) for url in urls]\n",
    "    await asyncio.gather(*tasks)\n",
    "\n",
    "# %time asyncio.run(main())  # sequential variant: ~10 s\n",
    "%time asyncio.run(main1())\n",
    "%time asyncio.run(main2())\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "before await\n",
      "worker_1 start\n",
      "worker_1 done\n",
      "awaited worker_1\n",
      "worker_2 start\n",
      "worker_2 done\n",
      "awaited worker_2\n",
      "CPU times: total: 15.6 ms\n",
      "Wall time: 3.01 s\n",
      "worker_1 start\n",
      "worker_2 start\n",
      "worker_1 done\n",
      "worker_2 done\n",
      "CPU times: total: 0 ns\n",
      "Wall time: 2.01 s\n"
     ]
    }
   ],
   "source": [
    "import asyncio\n",
    "\n",
    "async def worker_1():\n",
    "    \"\"\"Sleep 1 s.\"\"\"\n",
    "    print('worker_1 start')\n",
    "    await asyncio.sleep(1)\n",
    "    print('worker_1 done')\n",
    "\n",
    "async def worker_2():\n",
    "    \"\"\"Sleep 2 s.\"\"\"\n",
    "    print('worker_2 start')\n",
    "    await asyncio.sleep(2)\n",
    "    print('worker_2 done')\n",
    "\n",
    "async def main():\n",
    "    # Bare awaits run the workers one after another: 1 s + 2 s = 3 s.\n",
    "    print('before await')\n",
    "    await worker_1()\n",
    "    print('awaited worker_1')\n",
    "    await worker_2()\n",
    "    print('awaited worker_2')\n",
    "\n",
    "async def main1():\n",
    "    # gather() runs them concurrently: total = max(1, 2) = 2 s.\n",
    "    await asyncio.gather(worker_1(), worker_2())\n",
    "\n",
    "%time asyncio.run(main())\n",
    "%time asyncio.run(main1())\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "before await\n",
      "worker_1 start\n",
      "worker_2 start\n",
      "worker_1 done\n",
      "awaited worker_1\n",
      "worker_2 done\n",
      "awaited worker_2\n",
      "CPU times: total: 15.6 ms\n",
      "Wall time: 2 s\n",
      "worker_1 start\n",
      "worker_2 start\n",
      "worker_1 done\n",
      "worker_2 done\n",
      "CPU times: total: 0 ns\n",
      "Wall time: 2.02 s\n"
     ]
    }
   ],
   "source": [
    "import asyncio\n",
    "\n",
    "async def worker_1():\n",
    "    \"\"\"Sleep 1 s.\"\"\"\n",
    "    print('worker_1 start')\n",
    "    await asyncio.sleep(1)\n",
    "    print('worker_1 done')\n",
    "\n",
    "async def worker_2():\n",
    "    \"\"\"Sleep 2 s.\"\"\"\n",
    "    print('worker_2 start')\n",
    "    await asyncio.sleep(2)\n",
    "    print('worker_2 done')\n",
    "\n",
    "async def main():\n",
    "    # create_task() schedules both workers right away, so the awaits below\n",
    "    # just wait on already-running tasks: total = max(1, 2) = 2 s.\n",
    "    task1 = asyncio.create_task(worker_1())\n",
    "    task2 = asyncio.create_task(worker_2())\n",
    "    print('before await')\n",
    "    await task1\n",
    "    print('awaited worker_1')\n",
    "    await task2\n",
    "    print('awaited worker_2')\n",
    "\n",
    "async def main1():\n",
    "    # gather() gives the same concurrency without managing tasks by hand.\n",
    "    await asyncio.gather(worker_1(), worker_2())\n",
    "\n",
    "%time asyncio.run(main())\n",
    "%time asyncio.run(main1())\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 34,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "worker_1 start\n",
      "worker_2 start\n",
      "worker_3 start\n",
      "worker_1 done\n",
      "worker_2 done\n",
      "[1, 2.0, CancelledError('')]\n",
      "CPU times: total: 0 ns\n",
      "Wall time: 3.01 s\n"
     ]
    }
   ],
   "source": [
    "import asyncio\n",
    "\n",
    "async def worker_1():\n",
    "    print('worker_1 start')\n",
    "    await asyncio.sleep(1)\n",
    "    print('worker_1 done')\n",
    "    return 1\n",
    "\n",
    "async def worker_2():\n",
    "    print('worker_2 start')\n",
    "    await asyncio.sleep(2)\n",
    "    print('worker_2 done')\n",
    "    return 2 / 1\n",
    "\n",
    "async def worker_3():\n",
    "    print('worker_3 start')\n",
    "    await asyncio.sleep(3)\n",
    "    print('worker_3 done')\n",
    "    return 3\n",
    "\n",
    "async def main():\n",
    "    task_1 = asyncio.create_task(worker_1())\n",
    "    task_2 = asyncio.create_task(worker_2())\n",
    "    task_3 = asyncio.create_task(worker_3())\n",
    "\n",
    "    # NOTE(review): cancelling exactly when worker_3's 3 s sleep elapses is a\n",
    "    # race -- it may be cancelled or may complete. Sleep less (e.g. 2) for a\n",
    "    # deterministic cancellation.\n",
    "    await asyncio.sleep(3)\n",
    "    task_3.cancel()\n",
    "\n",
    "    # return_exceptions=True turns exceptions (incl. CancelledError) into\n",
    "    # result values instead of aborting the whole gather.\n",
    "    res = await asyncio.gather(task_1, task_2, task_3, return_exceptions=True)\n",
    "    print(res)\n",
    "\n",
    "%time asyncio.run(main())\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 6.2 生产者-消费者（producer-consumer）"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "producer 1, put 7\n",
      "producer 2, put 10\n",
      "consumer 1, got 7\n",
      "consumer 2, got 10\n",
      "producer 1, put 8\n",
      "producer 2, put 6\n",
      "producer 2, put 9\n",
      "producer 1, put 5\n",
      "producer 2, put 3\n",
      "producer 2, put 8\n",
      "producer 1, put 3\n",
      "consumer 2, got 8\n",
      "producer 1, put 2\n",
      "consumer 1, got 6\n",
      "consumer 2, got 9\n",
      "consumer 1, got 5\n",
      "consumer 2, got 3\n",
      "consumer 1, got 8\n",
      "consumer 2, got 3\n",
      "consumer 1, got 2\n",
      "CPU times: total: 0 ns\n",
      "Wall time: 10 s\n"
     ]
    }
   ],
   "source": [
    "import asyncio\n",
    "import random\n",
    "import nest_asyncio\n",
    "nest_asyncio.apply()\n",
    "\n",
    "async def consumer(queue, id):\n",
    "    \"\"\"Take values off the queue forever; runs until cancelled.\"\"\"\n",
    "    while True:\n",
    "        val = await queue.get()\n",
    "        print(f'consumer {id}, got {val}')\n",
    "        await asyncio.sleep(random.uniform(0.5, 1.5))\n",
    "\n",
    "async def producer(queue, id):\n",
    "    \"\"\"Put 5 random values on the queue, then finish.\"\"\"\n",
    "    for i in range(5):\n",
    "        val = random.randint(1, 10)\n",
    "        await queue.put(val)\n",
    "        print(f'producer {id}, put {val}')\n",
    "        await asyncio.sleep(random.uniform(0.1, 0.5))\n",
    "\n",
    "async def main():\n",
    "    queue = asyncio.Queue()\n",
    "    consumer_1 = asyncio.create_task(consumer(queue, 1))\n",
    "    consumer_2 = asyncio.create_task(consumer(queue, 2))\n",
    "    producer_1 = asyncio.create_task(producer(queue, 1))\n",
    "    producer_2 = asyncio.create_task(producer(queue, 2))\n",
    "\n",
    "    # Let everything run for 10 s, then stop the infinite consumers.\n",
    "    await asyncio.sleep(10)\n",
    "    consumer_1.cancel()\n",
    "    consumer_2.cancel()\n",
    "\n",
    "    # return_exceptions=True absorbs the CancelledError from the consumers.\n",
    "    await asyncio.gather(consumer_1, consumer_2, producer_1, producer_2, return_exceptions=True)\n",
    "\n",
    "%time asyncio.run(main())\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 6.4 爬虫"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Collecting lxml\n",
      "  Using cached lxml-5.3.2-cp312-cp312-win_amd64.whl.metadata (3.7 kB)\n",
      "Using cached lxml-5.3.2-cp312-cp312-win_amd64.whl (3.8 MB)\n",
      "Installing collected packages: lxml\n",
      "Successfully installed lxml-5.3.2\n",
      "Requirement already satisfied: requests in d:\\program\\python\\python312\\lib\\site-packages (2.32.3)\n",
      "Requirement already satisfied: charset-normalizer<4,>=2 in d:\\program\\python\\python312\\lib\\site-packages (from requests) (3.4.1)\n",
      "Requirement already satisfied: idna<4,>=2.5 in d:\\program\\python\\python312\\lib\\site-packages (from requests) (3.10)\n",
      "Requirement already satisfied: urllib3<3,>=1.21.1 in d:\\program\\python\\python312\\lib\\site-packages (from requests) (2.4.0)\n",
      "Requirement already satisfied: certifi>=2017.4.17 in d:\\program\\python\\python312\\lib\\site-packages (from requests) (2025.1.31)\n",
      "Collecting beautifulsoup4\n",
      "  Downloading beautifulsoup4-4.13.4-py3-none-any.whl.metadata (3.8 kB)\n",
      "Collecting soupsieve>1.2 (from beautifulsoup4)\n",
      "  Downloading soupsieve-2.7-py3-none-any.whl.metadata (4.6 kB)\n",
      "Requirement already satisfied: typing-extensions>=4.0.0 in d:\\program\\python\\python312\\lib\\site-packages (from beautifulsoup4) (4.13.2)\n",
      "Downloading beautifulsoup4-4.13.4-py3-none-any.whl (187 kB)\n",
      "Downloading soupsieve-2.7-py3-none-any.whl (36 kB)\n",
      "Installing collected packages: soupsieve, beautifulsoup4\n",
      "Successfully installed beautifulsoup4-4.13.4 soupsieve-2.7\n"
     ]
    }
   ],
   "source": [
    "# %pip (unlike !pip) always targets this kernel's environment.\n",
    "%pip install lxml\n",
    "%pip install requests\n",
    "%pip install beautifulsoup4\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "动画 / 音乐 04月25日 https://img2.doubanio.com/view/photo/s_ratio_poster/public/p2920430281.webp\n",
      "剧情 / 动作 04月26日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2913774287.webp\n",
      "剧情 / 动作 / 科幻 04月30日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2920193988.webp\n",
      "剧情 / 传记 04月30日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2919557624.webp\n",
      "剧情 / 犯罪 / 悬疑 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920392225.webp\n",
      "剧情 / 犯罪 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920302215.webp\n",
      "剧情 05月01日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2919914598.webp\n",
      "剧情 / 喜剧 05月01日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2917452778.webp\n",
      "剧情 / 喜剧 05月01日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2920664693.webp\n",
      "儿童 / 动画 / 冒险 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920015256.webp\n",
      "儿童 / 科幻 / 动画 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920250165.webp\n",
      "喜剧 / 科幻 / 动画 05月01日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2919980437.webp\n",
      "科幻 / 动画 / 冒险 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920194036.webp\n",
      "动作 / 犯罪 05月02日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920653336.webp\n",
      "惊悚 / 恐怖 05月09日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2917236414.webp\n",
      "纪录片 05月10日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2920541823.webp\n",
      "剧情 05月13日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2908008647.webp\n",
      "剧情 / 爱情 05月20日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2648230660.webp\n",
      "动画 / 奇幻 / 冒险 05月30日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2917973826.webp\n",
      "剧情 / 犯罪 / 悬疑 05月31日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920250194.webp\n",
      "CPU times: total: 594 ms\n",
      "Wall time: 19.3 s\n"
     ]
    }
   ],
   "source": [
    "import requests\n",
    "from bs4 import BeautifulSoup\n",
    "import nest_asyncio\n",
    "nest_asyncio.apply()\n",
    "\n",
    "def main():\n",
    "    \"\"\"Scrape Douban's coming-soon list, fetching each movie page sequentially.\"\"\"\n",
    "    headers = {\n",
    "        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',\n",
    "        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',\n",
    "        'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n",
    "        'Connection': 'keep-alive',\n",
    "        'Referer': 'https://movie.douban.com/',\n",
    "        'Sec-Fetch-Dest': 'document',\n",
    "        'Sec-Fetch-Mode': 'navigate',\n",
    "        'Sec-Fetch-Site': 'same-origin',\n",
    "        'Sec-Fetch-User': '?1',\n",
    "        'Upgrade-Insecure-Requests': '1',\n",
    "    }\n",
    "    url = \"https://movie.douban.com/cinema/later/beijing/\"\n",
    "\n",
    "    # One Session reuses the TCP connection across the ~20 detail-page requests.\n",
    "    session = requests.Session()\n",
    "    session.headers.update(headers)\n",
    "\n",
    "    response = session.get(url)\n",
    "    response.raise_for_status()\n",
    "    init_soup = BeautifulSoup(response.content, 'lxml')\n",
    "    all_movies = init_soup.find('div', id=\"showing-soon\")\n",
    "    for each_movie in all_movies.find_all('div', class_=\"item\"):\n",
    "        all_a = each_movie.find_all('a')\n",
    "        all_li_tag = each_movie.find_all('li')\n",
    "        movie_name = all_li_tag[1].text\n",
    "        url_to_fetch = all_a[1]['href']\n",
    "        movie_date = all_li_tag[0].text\n",
    "\n",
    "        # Check each per-movie request too (originally only the index page was checked).\n",
    "        response_item = session.get(url_to_fetch)\n",
    "        response_item.raise_for_status()\n",
    "        soup_item = BeautifulSoup(response_item.content, 'lxml')\n",
    "        img_tag = soup_item.find('img')\n",
    "        print('{} {} {}'.format(movie_name, movie_date, img_tag['src']))\n",
    "\n",
    "\n",
    "%time main()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "孤独摇滚(上) 04月25日 https://img2.doubanio.com/view/photo/s_ratio_poster/public/p2920430281.webp\n",
      "荣耀 04月26日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2913774287.webp\n",
      "雷霆特攻队* 04月30日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2920193988.webp\n",
      "水饺皇后 04月30日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2919557624.webp\n",
      "大风杀 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920392225.webp\n",
      "猎金游戏 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920302215.webp\n",
      "独一无二 05月01日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2919914598.webp\n",
      "苍茫的天涯是我的爱 05月01日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2917452778.webp\n",
      "人生开门红 05月01日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2920664693.webp\n",
      "熊孩子·探险熊兵 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920015256.webp\n",
      "开心超人之逆世营救 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920250165.webp\n",
      "海底小纵队：海啸大危机 05月01日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2919980437.webp\n",
      "宇宙护卫队：百变流星 05月01日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920194036.webp\n",
      "九龙城寨之围城 05月02日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920653336.webp\n",
      "祭屋 05月09日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2917236414.webp\n",
      "日暮归乡 05月10日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2920541823.webp\n",
      "内沙 05月13日 https://img3.doubanio.com/view/photo/s_ratio_poster/public/p2908008647.webp\n",
      "情书 05月20日 https://img1.doubanio.com/view/photo/s_ratio_poster/public/p2648230660.webp\n",
      "时间之子 05月30日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2917973826.webp\n",
      "私家侦探 05月31日 https://img9.doubanio.com/view/photo/s_ratio_poster/public/p2920250194.webp\n",
      "CPU times: total: 578 ms\n",
      "Wall time: 2.73 s\n"
     ]
    }
   ],
   "source": [
    "#### 6.5 Async crawler\n",
    "\n",
    "import asyncio\n",
    "import aiohttp\n",
    "import nest_asyncio\n",
    "\n",
    "from bs4 import BeautifulSoup\n",
    "\n",
    "# Needed for asyncio.run() inside Jupyter; previously this cell relied on an\n",
    "# earlier cell having called nest_asyncio.apply() (hidden state on re-run).\n",
    "nest_asyncio.apply()\n",
    "\n",
    "HEADERS = {\n",
    "    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',\n",
    "    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7',\n",
    "    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n",
    "    'Connection': 'keep-alive',\n",
    "    'Referer': 'https://movie.douban.com/',\n",
    "    'Sec-Fetch-Dest': 'document',\n",
    "    'Sec-Fetch-Mode': 'navigate',\n",
    "    'Sec-Fetch-Site': 'same-origin',\n",
    "    'Sec-Fetch-User': '?1',\n",
    "    'Upgrade-Insecure-Requests': '1',\n",
    "}\n",
    "\n",
    "async def fetch_content(url, session=None):\n",
    "    \"\"\"GET url and return the response body as text.\n",
    "\n",
    "    Reuses `session` when given; otherwise opens a one-off ClientSession\n",
    "    (works, but loses connection pooling across calls).\n",
    "    \"\"\"\n",
    "    if session is not None:\n",
    "        async with session.get(url) as response:\n",
    "            return await response.text()\n",
    "    async with aiohttp.ClientSession(\n",
    "        headers=HEADERS, connector=aiohttp.TCPConnector(ssl=False)\n",
    "    ) as session:\n",
    "        async with session.get(url) as response:\n",
    "            return await response.text()\n",
    "\n",
    "async def main():\n",
    "    url = \"https://movie.douban.com/cinema/later/beijing/\"\n",
    "    # One shared session: the ~20 detail pages reuse pooled connections\n",
    "    # instead of paying a new TCP/TLS handshake per request.\n",
    "    async with aiohttp.ClientSession(\n",
    "        headers=HEADERS, connector=aiohttp.TCPConnector(ssl=False)\n",
    "    ) as session:\n",
    "        init_page = await fetch_content(url, session)\n",
    "        init_soup = BeautifulSoup(init_page, 'lxml')\n",
    "\n",
    "        movie_names, urls_to_fetch, movie_dates = [], [], []\n",
    "\n",
    "        all_movies = init_soup.find('div', id=\"showing-soon\")\n",
    "        for each_movie in all_movies.find_all('div', class_=\"item\"):\n",
    "            all_a_tag = each_movie.find_all('a')\n",
    "            all_li_tag = each_movie.find_all('li')\n",
    "\n",
    "            movie_names.append(all_a_tag[1].text)\n",
    "            urls_to_fetch.append(all_a_tag[1]['href'])\n",
    "            movie_dates.append(all_li_tag[0].text)\n",
    "\n",
    "        # Fetch all detail pages concurrently.\n",
    "        tasks = [fetch_content(u, session) for u in urls_to_fetch]\n",
    "        pages = await asyncio.gather(*tasks)\n",
    "\n",
    "    for movie_name, movie_date, page in zip(movie_names, movie_dates, pages):\n",
    "        soup_item = BeautifulSoup(page, 'lxml')\n",
    "        img_tag = soup_item.find('img')\n",
    "        print('{} {} {}'.format(movie_name, movie_date, img_tag['src']))\n",
    "\n",
    "%time asyncio.run(main())\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.9"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
