from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from urllib.parse import quote
from pyquery import PyQuery
KEYWORD = 'iPad'        # search keyword used for every crawl
# NOTE(review): Selenium 3 style — the positional executable_path argument was
# removed in Selenium 4, where a Service object is required instead; confirm
# the installed selenium version matches this call.
browser = webdriver.Chrome(r'D:\软件开发文件\网络爬虫\chromedriver.exe')
wait = WebDriverWait(browser, 10)       # explicit-wait timeout in seconds

def crawl_page(page, _retries=2):
    """Open the Taobao search results for KEYWORD and navigate to *page*.

    For page 1 the plain search URL is loaded; for later pages the
    page-number input box is filled in and submitted. Once the item list
    is present, hands off to get_products() for parsing.

    Args:
        page: 1-based result-page number to load.
        _retries: internal — remaining reload attempts after a timeout.

    Raises:
        TimeoutException: if the page still hasn't rendered after all retries.
    """
    try:
        url = 'https://s.taobao.com/search?q=' + quote(KEYWORD)
        browser.get(url)

        if page > 1:
            page_box = wait.until(
                EC.presence_of_element_located(
                    (By.CSS_SELECTOR, '.input.J_Input')
                )
            )
            # Wait for clickability, not mere presence, before clicking.
            submit_button = wait.until(
                EC.element_to_be_clickable(
                    (By.CSS_SELECTOR, '.btn.J_Submit')
                )
            )
            page_box.clear()
            # send_keys expects a string; page is an int.
            page_box.send_keys(str(page))
            submit_button.click()
        # Locator is a (By, selector) TUPLE — the original called
        # By.CSS_SELECTOR as a function, which raised TypeError.
        wait.until(
            EC.presence_of_element_located(
                (By.CSS_SELECTOR, '#mainsrp-itemlist .items .item')
            )
        )
        get_products()
    except TimeoutException:
        # Bounded retry instead of the original unbounded bare-except recursion.
        if _retries > 0:
            crawl_page(page, _retries - 1)
        else:
            raise

def get_products():
    """Parse the current page source and print title/price of each item.

    Reads the global `browser`'s page source, so crawl_page() must have
    finished waiting for the item list before this is called.
    """
    html = browser.page_source        # full rendered HTML of the results page
    doc = PyQuery(html)
    # Use the same selector crawl_page() waits on (#mainsrp-itemlist);
    # the original parsed '.m-itemlist', which may not match the waited list.
    items = doc('#mainsrp-itemlist .items .item').items()       # product nodes
    for item in items:
        product = {
            'title': item.find('.title').text(),    # product title
            'price': item.find('.price').text(),    # product price
        }
        print(product)
# Guard the entry point so importing this module doesn't start a crawl.
if __name__ == '__main__':
    crawl_page(1)