import re
import time
import requests
from pymongo import MongoClient
from selenium import webdriver
from selenium.common.exceptions import TimeoutException, NoSuchElementException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from pyquery import PyQuery


# --- MongoDB connection settings ---
MONGO_URL = 'localhost'  # MongoDB host (default port 27017)
MONGO_DB = 'taobao'  # database name
MONGO_TABLE = 'meishi'  # collection that receives the scraped items
KEYWORD = '美食'  # search keyword typed into Taobao's search box ("food")

client = MongoClient(MONGO_URL)  # pymongo connects lazily; no I/O happens here
db = client[MONGO_DB]


def get_response(url):
    """Fetch *url* with requests, spoofing a desktop Chrome User-Agent.

    NOTE(review): this helper is not called anywhere in the visible script;
    it looks like a plain HTTP fallback kept alongside the Selenium flow.
    Returns the raw ``requests.Response`` object (no status check is done).
    """
    request_headers = {
        'Host': 'www.taobao.com',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36',
    }
    return requests.get(url, headers=request_headers, timeout=10)


# --- Launch Chrome, search for KEYWORD, and log in interactively ---
browser = webdriver.Chrome()
wait = WebDriverWait(browser, 10)
try:
    browser.get('https://www.taobao.com')
except TimeoutException:
    print("Time Out")

try:
    # find_element_by_xpath() was removed in Selenium 4; use the
    # find_element(By.XPATH, ...) form (By is already imported above).
    input_box = browser.find_element(By.XPATH, '//*[@id="q"]')
    input_box.send_keys(KEYWORD)

    search_btn = browser.find_element(By.XPATH, '//*[@id="J_TSearchForm"]/div[1]/button')
    search_btn.click()

    # Taobao forces a login before showing search results.
    login_account = input("请输入淘宝账号:")
    login_passwd = input("请输入淘宝密码:")

    account_box = browser.find_element(By.XPATH, '//*[@id="fm-login-id"]')
    account_box.send_keys(login_account)
    passwd_box = browser.find_element(By.XPATH, '//*[@id="fm-login-password"]')
    passwd_box.send_keys(login_passwd)
    submit_btn = browser.find_element(By.XPATH, '//*[@id="login-form"]/div[4]/button')
    submit_btn.click()

    time.sleep(15)  # wait out the post-login redirect; otherwise current_url is still the login page
    default_style = browser.current_url
    print("网格样式的网页:", end='')
    print(default_style)

    # Appending &style=list switches the results to the list layout.
    list_style = default_style + "&style=list"
    print("列表样式的网页:", end='')
    print(list_style)

except NoSuchElementException:
    print("NoSuchElement")
    # list_style is undefined on this path; re-raise instead of hitting a
    # confusing NameError a few lines below.
    raise

# Load the list-style results page.
try:
    browser.get(list_style)
except TimeoutException:
    print("Time Out")


def search_max(retries=3):
    """Return the total number of result pages shown in the pager.

    Waits for the "共 N 页" pager element and extracts N. Retries up to
    *retries* times on timeout (the original recursed here but dropped the
    recursive result by omitting ``return``, so it always returned None on
    the retry path).

    Raises:
        TimeoutException: if the pager never appears after all retries.
    """
    last_error = None
    for _ in range(retries):
        try:
            total_page = wait.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.total')))
            # Pager text looks like "共 100 页,"; grab the first digit run.
            return int(re.search(r'(\d+)', total_page.text).group(1))
        except TimeoutException as err:
            last_error = err  # page may still be loading; try again
    raise last_error


# Determine the page count once; the original called search_max() twice,
# re-waiting on the DOM each time.
total_pages = search_max()
print(f"共有{total_pages}页")

start_url = list_style + "&bcoffset=6&ntoffset=6&p4ppushleft=1%2C48&s=0"
print("浏览器已打开列表样式界面的第一页")
# browser.get(start_url)


def begin_crawl(page):
    """Crawl one result page and store every item document into MongoDB.

    Defined once at module level instead of being re-created on every loop
    iteration as in the original.
    """
    # Taobao paginates via the s= offset, 44 items per page.
    page_url = start_url.split('=0')[0] + "=" + str(44 * (page - 1))
    print(f"正在爬取第{page}页数据")
    print(page_url)

    # Navigate first, THEN wait for the new page's item list; the original
    # waited before browser.get(), i.e. on the previous page's DOM.
    browser.get(page_url)
    wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-itemlist .items .item')))

    page_analyze = PyQuery(browser.page_source)
    for item in page_analyze('#mainsrp-itemlist .items .item').items():
        result = {
            # .attr() returns None when the lazy-loaded src is missing;
            # guard before .replace() to avoid AttributeError.
            'image': (item.find('.pic .img').attr('src') or '').replace('\n', ''),
            'url': item.find('.pic .pic-link').attr('href'),
            'price': item.find('.price').text().replace('\n', ''),
            'deal': item.find('.deal-cnt').text().replace('\n', ''),
            'title': item.find('.title').text().replace('\n', ''),
            'shop': item.find('.shop').text().replace('\n', ''),
            'location': item.find('.location').text().replace('\n', '')
        }
        print(result)
        # Insert each item; the original inserted only the LAST item of the
        # page (and raised NameError on an empty page).
        try:
            db[MONGO_TABLE].insert_one(result)
        except Exception:
            print("当前页面存储失败")
    print(f"第{page}页爬取完毕")
    print(f'已将第{page}页数据存入MONODB')

    time.sleep(3)  # be polite: pause between pages


for page_no in range(1, total_pages + 1):
    begin_crawl(page_no)
# browser.close()











