import os
import random
import time

import pandas as pd
from selenium import webdriver

from settings import username, password, page_num, word, csv_file


# --- Browser setup and login -------------------------------------------------
chrome = webdriver.Chrome()
# Register the navigator.webdriver patch BEFORE any navigation: the script is
# only injected into documents loaded after this CDP call, so calling it after
# chrome.get() (as the original code did) left the first page unpatched and
# Taobao's slider verification could still detect Selenium.
chrome.execute_cdp_cmd(
    "Page.addScriptToEvaluateOnNewDocument",
    {"source": """Object.defineProperty(navigator, 'webdriver', {get: () => undefined})"""},
)
chrome.implicitly_wait(10)
chrome.maximize_window()
chrome.get(url='https://www.taobao.com/')

# Type the search keyword and submit; Taobao then redirects to its login page.
# Random 1-3s sleeps between actions make the interaction look less bot-like.
chrome.find_element_by_xpath('//*[@id="q"]').send_keys(word)
time.sleep(random.randint(1, 3))
chrome.find_element_by_xpath('//*[@id="J_TSearchForm"]/div[1]/button').click()
time.sleep(random.randint(1, 3))

# Fill in the credentials from settings.py and submit the login form.
chrome.find_element_by_xpath('//*[@id="fm-login-id"]').send_keys(username)
time.sleep(random.randint(1, 3))
chrome.find_element_by_xpath('//*[@id="fm-login-password"]').send_keys(password)
time.sleep(random.randint(1, 3))
chrome.find_element_by_xpath('//*[@id="login-form"]/div[4]/button').click()
time.sleep(random.randint(1, 3))

def parse_data():
    """Scrape every product card on the current search-result page.

    Relies on the module-level ``chrome`` driver already showing a Taobao
    search-result page.

    Returns:
        list[dict]: one dict per product with keys ``title``, ``price``,
        ``deal`` (number of buyers), ``shop``, ``location``, ``detail_url``.
    """
    # Each product card is a div nested under the results grid.
    divs = chrome.find_elements_by_xpath('//div[@class="grid g-clearfix"]/div/div')
    items = []  # renamed from ``list`` to stop shadowing the builtin
    for div in divs:
        # Locate the title link once; the original looked it up twice
        # (once for .text, once for the href).
        title_link = div.find_element_by_xpath('.//div[@class="row row-2 title"]/a')
        items.append({
            'title': title_link.text,                                                   # product name
            'price': div.find_element_by_xpath('.//strong').text + '元',                # product price
            'deal': div.find_element_by_xpath('.//div[@class="deal-cnt"]').text,        # number of buyers
            'shop': div.find_element_by_xpath('.//div[@class="shop"]/a/span[2]').text,  # shop name
            'location': div.find_element_by_xpath('.//div[@class="location"]').text,    # shop location
            'detail_url': title_link.get_attribute('href'),                             # detail-page URL
        })
    return items
def get_all_page_data(page_num):
    """Scrape ``page_num`` result pages and append them to ``csv_file``.

    Args:
        page_num (int): number of search-result pages to scrape, starting
            from the page the driver is currently on.
    """
    rows = []  # renamed from ``list`` to stop shadowing the builtin
    for page in range(1, page_num + 1):
        print(f'-----------------正在爬取第{page}页-----------------')
        rows += parse_data()
        # Only advance when another page is still needed: the original code
        # clicked "next" on the final iteration too, wasting a request and
        # risking a NoSuchElementException on the last results page.
        if page < page_num:
            chrome.find_element_by_xpath(
                '//li[@class="item next"]/a[@class="J_Ajax num icon-tag"]').click()
            time.sleep(random.randint(2, 3))
    df = pd.DataFrame(rows)
    # Append mode: write the header only when the file does not exist yet,
    # otherwise every run duplicated the header row inside the CSV.
    # index=False keeps meaningless per-run row numbers out of the file.
    df.to_csv(csv_file, mode='a+', header=not os.path.exists(csv_file), index=False)
get_all_page_data(page_num)