{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 70,
   "metadata": {},
   "outputs": [],
   "source": [
    "import re\n",
    "import time\n",
    "import datetime\n",
    "import pymongo\n",
    "from tqdm import tqdm\n",
    "import copy\n",
    "from loguru import logger\n",
    "\n",
    "from selenium import webdriver\n",
    "from selenium.webdriver.common.by import By\n",
    "from selenium.webdriver.common.keys import Keys\n",
    "from selenium.webdriver.support import expected_conditions as EC\n",
    "from selenium.webdriver.support.wait import WebDriverWait\n",
    "\n",
    "from selenium.common.exceptions import NoSuchElementException,StaleElementReferenceException\n",
    "\n",
    "\n",
    "class Tweet:\n",
    "    \"\"\"One scraped tweet; its __dict__ is inserted directly into MongoDB.\"\"\"\n",
    "\n",
    "    def __init__(self, query, uid, ptime, pcontent, padditional, nb_reply, nb_retweet, nb_favorite):\n",
    "        self.query = query  # search query that surfaced this tweet\n",
    "        self.uid = uid  # author's screen name (last path segment of the profile URL)\n",
    "        self.ptime = ptime  # post date, normalized by convert_time()\n",
    "        self.pcontent = pcontent  # tweet body text\n",
    "        self.padditional = padditional  # extras: quoted tweet, article links, images, videos\n",
    "        self.nb_retweet = nb_retweet  # nbr of retweet\n",
    "        self.nb_favorite = nb_favorite  # nbr of favorite\n",
    "        self.nb_reply = nb_reply    # nbr of reply\n",
    "\n",
    "    def __repr__(self):\n",
    "        return \"Tweet={}\\nQuery={}\".format(self.pcontent, self.query)\n",
    "\n",
    "\n",
    "class User:\n",
    "\n",
    "    def __init__(self, profile_url):\n",
    "        self.profile_url = profile_url\n",
    "        self.ID = profile_url.split('/')[-1]\n",
    "        self.name = ''\n",
    "        self.avatar = ''\n",
    "\n",
    "    def __repr__(self):\n",
    "        return \"User {}\".format(self.ID)\n",
    "\n",
    "\n",
    "def convert_time(x):\n",
    "    '''\n",
    "    for x in ['20分钟','1小时','1天', '10月10日','2018年10月1日']:\n",
    "        print(convert_time(x))\n",
    "    '''\n",
    "    now = datetime.datetime.now()\n",
    "    pattern = r'\\d{4}年\\d+月\\d+日'\n",
    "    if re.match(pattern, x):\n",
    "        return x\n",
    "    pattern = r'\\d+月\\d+日'\n",
    "    if re.match(pattern, x):\n",
    "        return \"{}年\".format(now.year)+x\n",
    "    return \"{}年{}月{}日\".format(now.year, now.month, now.day)\n",
    "\n",
    "\n",
    "def is_non_result(browser):\n",
    "    '''\n",
    "    Return True when the search results page shows Twitter's\n",
    "    \"no matching results\" message (没有符合搜索条件的结果).\n",
    "\n",
    "    Relies on the module-level `wait` (WebDriverWait) created in the\n",
    "    setup cell below.\n",
    "    '''\n",
    "    result_div_xpath = \"//div[@id='react-root']\"\n",
    "    wait.until(EC.presence_of_element_located(result_div_xpath and (By.XPATH, result_div_xpath)))\n",
    "    try:\n",
    "        result_div = browser.find_element_by_xpath(result_div_xpath)\n",
    "        return '没有符合搜索条件的结果' in result_div.text\n",
    "    except NoSuchElementException as e:\n",
    "        # Page root missing entirely -> treat as \"has results\".\n",
    "        return False\n",
    "\n",
    "\n",
    "def get_search_input_v1(browser):\n",
    "    \"\"\"Return the search box on the /explore page (placeholder '搜索 Twitter').\"\"\"\n",
    "    locator = (By.XPATH, \"//input[@placeholder='搜索 Twitter']\")\n",
    "    # Block until the input is attached to the DOM, then fetch it.\n",
    "    wait.until(EC.presence_of_element_located(locator))\n",
    "    return browser.find_element_by_xpath(locator[1])\n",
    "\n",
    "\n",
    "def get_search_input_v2(browser):\n",
    "    \"\"\"Open the legacy search-home page and return its search box.\"\"\"\n",
    "    browser.get('https://twitter.com/search-home')\n",
    "    locator = (By.ID, 'search-home-input')\n",
    "    # Block until the input is attached to the DOM, then fetch it.\n",
    "    wait.until(EC.presence_of_element_located(locator))\n",
    "    return browser.find_element_by_id(locator[1])\n",
    "\n",
    "\n",
    "def extract_reply_retweet_favorite(element):\n",
    "    t = []\n",
    "    for x in element.find_elements_by_xpath('./div')[:3]:\n",
    "        if x.text.strip() == '':\n",
    "            t.append(0)\n",
    "        else:\n",
    "            t.append(int(x.text.strip()))\n",
    "    return tuple(t)\n",
    "\n",
    "\n",
    "def parse_result_div(result_div):\n",
    "    count = 0\n",
    "    for div in result_div:\n",
    "        user, tweet = div.find_elements_by_xpath('./div')\n",
    "        profile_url = user.find_element_by_tag_name(\n",
    "            'a').get_attribute('href').strip()\n",
    "        uid = profile_url.split('/')[-1]\n",
    "#         print('pasing uid={}'.format(uid))\n",
    "        a, *b_c, d = tweet.find_elements_by_xpath('./div')  # 按照div分为>=3层\n",
    "        ptime = a.find_elements_by_tag_name('a')[-1].text\n",
    "        ptime = convert_time(ptime)\n",
    "        nb_reply, nb_retweet, nb_favorite = 0,0,0\n",
    "        try:\n",
    "            nb_reply, nb_retweet, nb_favorite = extract_reply_retweet_favorite(\n",
    "                d)\n",
    "        except:\n",
    "            nb_reply, nb_retweet, nb_favorite = 0, 0, 0\n",
    "        pcontent = b_c[0].text\n",
    "        padditional = []\n",
    "        if len(b_c) > 1:\n",
    "            for x in b_c[1:]:\n",
    "                try:\n",
    "                    a = x.find_element_by_tag_name('a').get_attribute('href')\n",
    "                    padditional.append(a)\n",
    "                except NoSuchElementException as e:\n",
    "                    padditional.append(x.text.strip())\n",
    "        user = User(profile_url)\n",
    "        tweet = Tweet(query, uid, ptime, pcontent, padditional,\n",
    "                      nb_reply, nb_retweet, nb_favorite)\n",
    "        # save to databse\n",
    "        if user_table.insert_one(user.__dict__) and tweet_table.insert_one(tweet.__dict__):\n",
    "            count += 1\n",
    "    return count\n",
    "\n",
    "\n",
    "def crawl(browser, query, max_scroll_retries=20):\n",
    "    \"\"\"Scrape tweets for `query` from the current results page.\n",
    "\n",
    "    Repeatedly parses the visible tweet cards and scrolls to trigger\n",
    "    infinite loading until MAX_SIZE tweets are stored.  The previous\n",
    "    version's scroll loop spun forever when the page stopped loading\n",
    "    new tweets; it now gives up after `max_scroll_retries` fruitless\n",
    "    scrolls.  It also no longer re-parses the initial cards before the\n",
    "    first scroll (which duplicated Mongo inserts).\n",
    "    Returns the number of tweets stored.\n",
    "    \"\"\"\n",
    "    result_div_xpath = '//div[@data-testid=\"tweet\"]'\n",
    "\n",
    "    def _visible_tweets():\n",
    "        # Wait for at least one tweet card, then return all of them.\n",
    "        wait.until(EC.presence_of_element_located((By.XPATH, result_div_xpath)))\n",
    "        return browser.find_elements_by_xpath(result_div_xpath)\n",
    "\n",
    "    count = 0\n",
    "    result_div = _visible_tweets()\n",
    "    last_div = result_div[-1]\n",
    "    try:\n",
    "        count += parse_result_div(result_div)\n",
    "    except StaleElementReferenceException as e:\n",
    "        # DOM re-rendered mid-parse; the main loop will rescan.\n",
    "        time.sleep(2)\n",
    "    while count < MAX_SIZE:\n",
    "        logger.info(\"{}/{}\".format(count, MAX_SIZE))\n",
    "        # Scroll until a new last card shows up; bail out when the page\n",
    "        # stops producing new content.\n",
    "        for _ in range(max_scroll_retries):\n",
    "            browser.execute_script(\n",
    "                'window.scrollTo(0,document.body.scrollHeight)')\n",
    "            result_div = _visible_tweets()\n",
    "            if result_div[-1] != last_div:\n",
    "                last_div = result_div[-1]\n",
    "                break\n",
    "            time.sleep(1)\n",
    "        else:\n",
    "            logger.warning(\n",
    "                'no new tweets after {} scrolls, stopping'.format(max_scroll_retries))\n",
    "            break\n",
    "        try:\n",
    "            count += parse_result_div(result_div)\n",
    "        except StaleElementReferenceException as e:\n",
    "            time.sleep(2)\n",
    "            continue\n",
    "    return count"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): hardcoded internal MongoDB address with no auth --\n",
    "# move to config/env before sharing this notebook.\n",
    "client = pymongo.MongoClient(\"mongodb://10.108.17.25:27017/\")\n",
    "twitter_db = client[\"twitter\"]\n",
    "user_table = twitter_db['user']\n",
    "tweet_table = twitter_db['tweet']\n",
    "MAX_SIZE = 50  # stop crawling a query once this many tweets are stored\n",
    "\n",
    "# Launch the browser; `wait` is the shared explicit wait (100 s timeout)\n",
    "# used by the helper functions above.\n",
    "browser = webdriver.Chrome()\n",
    "wait = WebDriverWait(browser, 100)\n",
    "\n",
    "# Open Twitter so the operator can log in manually before crawling.\n",
    "browser.get('https://twitter.com/')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# time.sleep(60)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 71,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "  0%|          | 0/1 [00:00<?, ?it/s]\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A2019-12-15 21:56:07.117 | INFO     | __main__:<module>:7 - query = the belt and road\n",
      "2019-12-15 21:56:11.655 | INFO     | __main__:crawl:146 - 10/50\n",
      "2019-12-15 21:56:13.571 | INFO     | __main__:crawl:146 - 20/50\n",
      "2019-12-15 21:56:15.190 | INFO     | __main__:crawl:146 - 27/50\n",
      "2019-12-15 21:56:17.691 | INFO     | __main__:crawl:146 - 27/50\n",
      "2019-12-15 21:56:21.419 | INFO     | __main__:crawl:146 - 48/50\n",
      "2019-12-15 21:56:23.922 | INFO     | __main__:crawl:146 - 48/50\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "100%|██████████| 1/1 [00:19<00:00, 19.94s/it]\u001b[A\u001b[A\u001b[A\u001b[A\u001b[A\n"
     ]
    }
   ],
   "source": [
    "# Queries whose result page was empty are collected here for review.\n",
    "bad_query_list = []\n",
    "query_list = ['the belt and road']\n",
    "browser.refresh()\n",
    "time.sleep(2)\n",
    "\n",
    "for query in tqdm(query_list):\n",
    "    logger.info('query = {}'.format(query))\n",
    "    browser.get('https://twitter.com/explore')\n",
    "\n",
    "    # Locate the search box; if Twitter redirected away from /explore,\n",
    "    # fall back to the legacy search-home page.\n",
    "    if browser.current_url == 'https://twitter.com/explore':\n",
    "        search_input = get_search_input_v1(browser)\n",
    "    else:\n",
    "        search_input = get_search_input_v2(browser)\n",
    "    # Submit the query.\n",
    "    search_input.clear()\n",
    "    search_input.send_keys(query)\n",
    "    search_input.send_keys(Keys.ENTER)\n",
    "\n",
    "    # Skip queries with no results; otherwise scrape this results page.\n",
    "    if is_non_result(browser):\n",
    "        bad_query_list.append(query)\n",
    "        continue\n",
    "    time.sleep(1)\n",
    "    crawl(browser, query)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
