#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import bs4

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.keys import Keys

# Target page: the Taobao home page.
url = r'https://www.taobao.com'

options = webdriver.ChromeOptions()
# Headless mode is intentionally OFF: Taobao's login flow needs a visible
# browser for the manual QR-code / slider interaction.
# (The original guarded this with a dead `if False:` block.)
# options.headless = True

# Extra request headers injected via driver capabilities.
# NOTE(review): the 'chrome.page.customHeaders.*' capability trick is only
# honoured by some legacy driver setups; current chromedriver may ignore
# it -- confirm it still has an effect.
headers = {
    'User-Agent': 'user-agent=Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.87 Safari/537.36',
    'Accept-Language': 'zh-CN,zh;q=0.1'
}

cap = DesiredCapabilities.CHROME.copy()

for key, value in headers.items():
    # Fixed typo: 'chorme' -> 'chrome', so the capability keys are well-formed.
    cap['chrome.page.customHeaders.%s' % key] = value

driver = webdriver.Chrome(options=options, desired_capabilities=cap)

driver.get(url)

# Explicitly wait until the login area ('member') is clickable, then open
# the login page.
wait = WebDriverWait(driver, 10)
wait.until(expected_conditions.element_to_be_clickable((By.CLASS_NAME, 'member')))
# find_element_by_* was removed in Selenium 4; use the By locator API.
# Compound class names are not permitted with By.CLASS_NAME, so a single
# class name is used here.
button = driver.find_element(By.CLASS_NAME, 'btn-login')
button.click()

# Give the manual login (QR code / slider) a few seconds to complete.
# (Cookie persistence via pickle was prototyped here and removed as dead,
# commented-out code.)
driver.implicitly_wait(5)

# The login flow may open a new browser window; switch to it when present.
# Guarded so the script no longer raises IndexError when only one window
# exists.
handles = driver.window_handles
if len(handles) > 1:
    driver.switch_to.window(handles[1])  # switch to new window


# ---- Search, scrape the result page, export to Excel ----

# Explicit wait for the search box on the (post-login) home page.
wait = WebDriverWait(driver, 100)
wait.until(expected_conditions.presence_of_element_located((By.CLASS_NAME, 'search-combobox-input-wrap')))
# find_element_by_* was removed in Selenium 4; use the By locator API.
search = driver.find_element(By.CLASS_NAME, 'search-combobox-input')
search.send_keys("mac键盘膜")

search.send_keys(Keys.ENTER)

# Wait for the result cards to render before reading page_source; the
# original read the source immediately after ENTER and could see an
# empty/intermediate page.
wait.until(expected_conditions.presence_of_element_located((By.CLASS_NAME, 'items')))

soup = bs4.BeautifulSoup(driver.page_source, 'lxml')

# One list per output column; each result card appends one value to each.
items = {'title': [], 'price': [], 'deal': [], 'location': []}
for item in soup.find_all('div', {'class': 'items'}):
    # row-1 holds price and deal count in two child <div>s.
    row1 = item.find('div', {'class': 'row row-1'})
    divs = row1.find_all('div') if row1 else []
    if len(divs) < 2:
        continue  # malformed card: skip instead of crashing on unpack
    items['price'].append(divs[0].text)
    items['deal'].append(divs[1].text)
    # row-2 holds the item title. The original used `p.title`, which in
    # BeautifulSoup searches for a <title> TAG (always None inside a result
    # card) and raised AttributeError; use the row's text instead.
    row2 = item.find('div', {'class': 'row row-2'})
    items['title'].append(row2.get_text(strip=True) if row2 else '')
    # row-3 holds the shop location; `p.location` had the same tag-lookup
    # bug. NOTE(review): assumes a child div with class 'location' -- confirm
    # against the live markup; falls back to the whole row's text.
    row3 = item.find('div', {'class': 'row row-3'})
    loc = row3.find('div', {'class': 'location'}) if row3 else None
    if loc is not None:
        items['location'].append(loc.get_text(strip=True))
    else:
        items['location'].append(row3.get_text(strip=True) if row3 else '')

import pandas as pd
df = pd.DataFrame(data=items)
# to_excel() requires a target path (calling it with no arguments raised
# TypeError); index=False keeps the sheet free of the numeric row index.
df.to_excel('taobao_items.xlsx', index=False)

