import re
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from pyquery import PyQuery as pq
import time
import pyodbc
import csv

# Module-level WebDriver shared by every function below.
# NOTE(review): "brower" is a misspelling of "browser", but it is used
# consistently throughout the file, so it is kept as-is here.
brower=webdriver.Chrome()
# Shared explicit-wait helper: all element lookups time out after 10 seconds.
wait=WebDriverWait(brower,10)


def search():
    """Open the Taobao home page, search for '美食' and scrape page 1.

    Retries the whole sequence whenever a Selenium wait times out.
    The original implementation retried via unbounded recursion
    (``return search()``), which grows the call stack on persistent
    timeouts; a loop is used instead. Returns ``None``.
    """
    while True:
        try:
            brower.get('https://www.taobao.com/')
            # 'input' renamed to avoid shadowing the builtin.
            search_box = wait.until(
                EC.presence_of_element_located((By.CSS_SELECTOR, '#q'))
            )
            submit = wait.until(EC.element_to_be_clickable(
                (By.CSS_SELECTOR, '#J_TSearchForm > div.search-button > button')))
            search_box.send_keys('美食')
            submit.click()
            # Fixed pause so the (possibly JS-rendered) result list settles
            # before parsing; kept from the original implementation.
            time.sleep(15)
            get_products()
            return
        except TimeoutException:
            # Page or element never appeared within 10s -- start over.
            continue
def next_page(page_number):
    """Jump to result page ``page_number`` via the pager box and scrape it.

    Types the page number into the pager input, submits, waits until the
    pager highlights that page, then parses the products. Retries on
    TimeoutException with a loop instead of the original unbounded
    recursion. Returns ``None``.
    """
    while True:
        try:
            # 'input' renamed to avoid shadowing the builtin.
            page_box = wait.until(
                EC.presence_of_element_located(
                    (By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.form > input'))
            )
            submit = wait.until(EC.presence_of_element_located(
                (By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.form > span.btn.J_Submit')))
            page_box.clear()
            page_box.send_keys(page_number)
            submit.click()
            # Confirm navigation: the active pager item must show the target number.
            wait.until(EC.text_to_be_present_in_element(
                (By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > ul > li.item.active > span'),
                str(page_number)))
            get_products()
            return
        except TimeoutException:
            # Pager did not respond within 10s -- retry the same page.
            continue
def insert_sql(product):
    """Insert one scraped product into the SQL Server table 零食.

    ``product`` is a dict with keys 'title', 'deal', 'shop', 'location'
    (missing keys become the string 'None', matching the original
    ``str(product.get(...))`` behaviour).

    Fix: the original built the INSERT by string formatting, which is an
    SQL-injection vector (scraped titles can contain quotes); values are
    now passed as pyodbc query parameters. The connection is also closed
    in a ``finally`` block so a failed execute no longer leaks it.
    """
    conn = pyodbc.connect(DRIVER='{SQL Server}', SERVER='212.64.70.92',
                          DATABASE='weimingzhong', UID="SA", PWD="Guat1234")
    try:
        cursor = conn.cursor()
        sql = 'INSERT INTO 零食(标题,销量,店名,位置) VALUES(?,?,?,?)'
        cursor.execute(sql,
                       str(product.get('title')),
                       str(product.get('deal')),
                       str(product.get('shop')),
                       str(product.get('location')))
        conn.commit()
    finally:
        conn.close()
def save_xsl(product):
    """Append one product record as a CSV row to D:/Awei/零食.csv.

    ``product`` is a dict with keys 'title', 'deal', 'shop', 'location';
    missing keys are written as the string 'None' (original
    ``str(product.get(...))`` behaviour). The row is also echoed to
    stdout, as before.

    Fix: the original accumulated the row in a variable named ``list``,
    shadowing the builtin; replaced with a comprehension.
    """
    # 'a' = append mode; newline='' is required by the csv module on Windows.
    with open("D:/Awei/零食.csv", 'a', encoding='utf-8', newline='') as f:
        writer = csv.writer(f)
        row = [str(product.get(key))
               for key in ('title', 'deal', 'shop', 'location')]
        print(row)
        writer.writerow(row)


def get_products():
    """Parse every product card on the current results page and print it.

    Waits for the item list container, feeds the page source to PyQuery,
    and for each card extracts deal count, title, shop name and location,
    stripping every character that is not a CJK ideograph, digit or
    ASCII letter.

    Fix: the identical cleaning regex was recompiled four times per item
    inside the loop; it is now compiled once and reused.
    """
    wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '#mainsrp-itemlist')))
    doc = pq(brower.page_source)
    # Keep only CJK ideographs (4E00-9FA5), digits and ASCII letters.
    clean = re.compile(u"[^\u4e00-\u9fa5\u0030-\u0039\u0041-\u005a\u0061-\u007a]")
    for item in doc('#mainsrp-itemlist .items .item').items():
        # [0:-3] drops the trailing "人付款" suffix of the deal counter.
        # NOTE(review): replacing 万 with "000" is only correct when the
        # count had one decimal digit (e.g. 1.5万 -> 15万 -> 15000);
        # behaviour kept from the original.
        deal = clean.sub("", item.find('.deal-cnt').text()[0:-3]).replace("万", "000")
        title = clean.sub("", item.find('.title').text())
        shop = clean.sub("", item.find('.shop').text())
        location = clean.sub("", item.find('.location').text())
        product = {
            'deal': deal,
            'title': title,
            'shop': shop,
            'location': location
        }
        print(product)
        #save_xsl(product)
        #insert_sql(product)
def main():
    """Scrape the first 50 result pages: initial search, then pages 2-50."""
    search()
    page = 2
    while page <= 50:
        next_page(page)
        page += 1

# Run the scraper only when executed as a script, not on import.
if __name__ == '__main__':
    main()