from selenium.webdriver import Edge
from selenium.webdriver.common.by import By
import time
import csv
from selenium.webdriver.edge.options import Options # 无头


# Run the Edge browser headless (no visible window).
edge_options = Options()
edge_options.add_argument('--headless')

# Product keyword to search for, typed by the user.
word = input('请输入你想要获取的商品名称：')

# Create the CSV file (append mode) and write the header row.
# NOTE(review): append mode means re-running with the same keyword
# writes a second header into the same file — confirm this is intended.
f = open(f'{word}.csv', mode='a', encoding='utf-8', newline='')
csv_writer = csv.DictWriter(f,fieldnames=[
    '标题', '价格', '评论数', '店铺名', '详情页'
])
csv_writer.writeheader()    # write the header row

# Open JD.com, type the keyword into the search box and submit the search.
url = 'https://www.jd.com/'
web = Edge(options=edge_options)
web.get(url)
web.implicitly_wait(5)
web.find_element(By.XPATH, '//*[@id="key"]').send_keys(word)
web.find_element(By.XPATH, '//*[@id="search"]/div/div[2]/button').click()
web.implicitly_wait(5)


def drop_down():
    """Scroll down the page in six steps so lazily-loaded items render.

    Scrolls to fractions 1/9, 3/9, ..., 11/9 of the document height,
    pausing one second before each step to let content load.
    """
    for step in range(1, 12, 2):
        time.sleep(1)  # give the page time to render newly revealed items
        fraction = step / 9
        script = f'document.documentElement.scrollTop = document.documentElement.scrollHeight * {fraction:f}'
        web.execute_script(script)

def get_shop_info():
    """Scrape every product card on the current results page into the CSV.

    Scrolls to trigger lazy loading, then for each product card extracts
    title, price, comment count, shop name and detail-page URL, and appends
    one row per product via the module-level ``csv_writer``.
    Cards missing any sub-element (ads/placeholders) are skipped.
    """
    drop_down()
    web.implicitly_wait(5)
    lis = web.find_elements(By.CSS_SELECTOR, '#J_goodsList ul li.gl-item')
    for li in lis:
        # Keep the try body to just the element lookups that can legitimately
        # fail; a bare `except: pass` here previously hid every error,
        # including real bugs in the parsing code.
        try:
            title = li.find_element(By.CSS_SELECTOR, '.p-name a').get_attribute('title').replace('\n', '')   # title
            price = li.find_element(By.CSS_SELECTOR, '.p-price i').text + '元'     # price
            commit = li.find_element(By.CSS_SELECTOR, '.p-commit strong a').text    # comment count
            shop_name = li.find_element(By.CSS_SELECTOR, '.J_im_icon a').text   # shop name
            href = li.find_element(By.CSS_SELECTOR, '.p-img a').get_attribute('href')   # detail-page URL
        except Exception as exc:
            # Card lacks one of the expected sub-elements — skip it, but
            # report the skip instead of swallowing the error silently.
            print(f'skipped one item: {exc}')
            continue

        print(title, price, commit, shop_name, href)
        csv_writer.writerow({
            '标题': title,
            '价格': price,
            '评论数': commit,
            '店铺名': shop_name,
            '详情页': href,
        })

# Scrape up to 10 result pages, clicking "next page" between pages.
# try/finally guarantees the browser is closed and the CSV buffer is
# flushed to disk even if a page raises mid-run (the original leaked
# both the driver and the open file on any error).
try:
    for page in range(1, 11):
        print(f'正在爬取{page}页数据')
        get_shop_info()
        try:
            web.find_element(By.CSS_SELECTOR, '.pn-next').click()
        except Exception:
            # No "next" button — we reached the last results page;
            # stop early instead of crashing.
            break
finally:
    web.quit()      # close the browser when done
    f.close()       # flush buffered CSV rows to disk
