{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "364511ae",
   "metadata": {},
   "outputs": [],
   "source": [
    "import requests\n",
    "from bs4 import element\n",
    "from bs4 import BeautifulSoup\n",
    "import pangu\n",
    "import json"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "243dacdf",
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "常绿乔木 11\n",
      "1\n",
      "2\n",
      "3\n",
      "4\n",
      "5\n",
      "6\n",
      "7\n",
      "8\n",
      "9\n",
      "10\n",
      "11\n",
      "落叶乔木 9\n",
      "1\n",
      "2\n",
      "3\n",
      "4\n",
      "5\n",
      "6\n",
      "7\n",
      "8\n",
      "9\n",
      "灌木 7\n",
      "1\n",
      "2\n",
      "3\n",
      "4\n",
      "5\n",
      "6\n",
      "7\n",
      "草本 4\n",
      "1\n",
      "2\n",
      "3\n",
      "4\n",
      "藤本 2\n",
      "1\n",
      "2\n",
      "水生 1\n",
      "1\n",
      "棕榈 1\n",
      "1\n",
      "竹 2\n",
      "1\n",
      "2\n"
     ]
    }
   ],
   "source": [
    "seq = {}\n",
    "resp = requests.get('http://www.hqjt.ynu.edu.cn/m/plantclass/index.jhtml')\n",
    "sp1 = BeautifulSoup(resp.text)\n",
    "for category in sp1.select('body > div.conter.conter_lb > div.div1 > ul > li > a'):\n",
    "    name = category['title']\n",
    "    data = []\n",
    "    url = 'http://www.hqjt.ynu.edu.cn' + category['href']\n",
    "    sp2 = BeautifulSoup(requests.get(url).text)\n",
    "    total_page = int(sp2.select_one(\n",
    "        'body > div.conter.conter_lb > div.site > span').get_text(strip=True).split('/')[1])\n",
    "    print(name, total_page)\n",
    "    for i in range(1, total_page + 1):\n",
    "        print(i)\n",
    "        sp3 = BeautifulSoup(requests.get(f\"{url.split('index.jhtml')[0]}index_{i}.jhtml\").text)\n",
    "        for plant in sp3.select('body > div.conter.conter_lb > div.service > a'):\n",
    "            t = {\n",
    "                'name': plant['title'],\n",
    "                'cover': 'http://www.hqjt.ynu.edu.cn' + plant.select_one('img')['src'],\n",
    "                'information': None\n",
    "            }\n",
    "            \n",
    "            sp4 = BeautifulSoup(requests.get('http://www.hqjt.ynu.edu.cn' + plant['href']).text)\n",
    "            information = {}\n",
    "            last_title = None\n",
    "            for e in sp4.select_one('body > div.xw_nr > div.plant').children:\n",
    "                if isinstance(e, (element.Comment, element.NavigableString)):\n",
    "                    continue\n",
    "                if e.name == 'h4':\n",
    "                    last_title = e.get_text(strip=True)\n",
    "                    information[last_title] = []\n",
    "                elif e.name == 'div':\n",
    "                    for s in e.children:\n",
    "                        if s.name == 'p':\n",
    "                            if last_title == '名人简介':\n",
    "                                for p in s.children:\n",
    "                                    if len(p.get_text(strip=True)) != 0:\n",
    "                                        information[last_title].append(p.get_text(strip=True))\n",
    "                            else:\n",
    "                                information[last_title].append(s.get_text(strip=True))\n",
    "                        elif s.name == 'ul':\n",
    "                            if len(information[last_title]) == 0:\n",
    "                                information[last_title] = {}\n",
    "                            for l in s.children:\n",
    "                                if l.name == 'li':\n",
    "                                    k = l.select_one('span').get_text(strip=True)[:-1]\n",
    "                                    v = l.select_one('div').get_text(strip=True)\n",
    "                                    information[last_title][k] = v\n",
    "                                    \n",
    "            t['information'] = information\n",
    "            data.append(t)\n",
    "    seq[name] = data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "1e44013e",
   "metadata": {},
   "outputs": [],
   "source": [
    "with open('plant.json', 'w', encoding='utf-8') as fp:\n",
    "    json.dump(seq, fp)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "pytorch-env",
   "language": "python",
   "name": "pytorch-env"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.18"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
