import pymysql, pandas, re, time
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from datetime import datetime
from urllib.parse import quote_plus
# Connection settings for the TiDB/MySQL server that receives scraped rows.
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a config file.
settings = {'host': '172.16.10.187', 'user': 'tidb', 'port': 3306, 'password': 'Tidb@guest@123', 'database': 'crawl',
            'charset': 'utf8'}
conn = pymysql.connect(**settings)
print('##################################################数据库连接成功##################################################')
cursor = conn.cursor()
print('##################################################获取游标成功##################################################')

# Parameterized INSERT template used by save_data(); one row per scraped product/spec.
sql = "insert into rose_flower_copy1(title, channel, price, number, average_price,classify, shop, search, create_time) values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"


# 数据去重并生成新表
# Deduplicate a table's rows and append them to a new table.
def remove_duplicate(s_table, a_table):
    """Copy *s_table* into *a_table* with duplicate URLs removed.

    Reads every row of *s_table* through the module-level ``conn``,
    drops rows whose ``url`` column repeats (keeping the first
    occurrence), and appends the result to *a_table* on the target
    MySQL server at 172.16.10.201.  Returns the status message that
    is also printed.
    """
    print('##################################################去重并生成新表##################################################')
    # Table names cannot be bound as SQL parameters, so the name is interpolated.
    query = "select * from {}".format(s_table)
    frame = pandas.read_sql(query, conn)
    deduped = frame.drop_duplicates(subset=["url"], keep="first")
    # Use the public DataFrame.to_sql API rather than the internal
    # pandas.io.sql.to_sql; the password is URL-escaped because it contains '@'.
    target_uri = 'mysql+pymysql://zy001:{}@172.16.10.201:3306/crawl?charset=utf8mb4'.format(quote_plus("zy@123"))
    deduped.to_sql(a_table, con=target_uri, index=False, if_exists='append')
    info = '##################################################{s_table}表数据已经去重并生成新表:{a_table}##################################################'.format(
        s_table=s_table, a_table=a_table)
    print(info)
    return info

def save_data(driver, title, channel, search, classify):
    """Scrape shop and price from the product page currently loaded in
    *driver* and insert one row via the module-level cursor/connection.

    The stem count is the first run of digits found in *title*; when no
    digits are present — or the count is zero, which would otherwise
    divide by zero — both ``number`` and ``average_price`` fall back to
    the ``-1`` sentinel.
    """
    shop = driver.find_element_by_xpath('//*[@id="crumb-wrap"]/div/div[2]/div[2]/div[1]/div/a').text
    price = driver.find_element_by_xpath('//span[@class="p-price"]/span[2]').text
    # Raw string avoids the invalid-escape-sequence warning for '\d'.
    digits = re.findall(r'\d+', title)
    if not digits or int(digits[0]) == 0:
        # No count in the title (or a zero count): record sentinels instead
        # of crashing with ZeroDivisionError.
        number = -1
        average_price = -1
    else:
        number = digits[0]
        average_price = float(price) / int(number)
    row = (title, channel, price, number, average_price, classify, shop, search, datetime.now())
    cursor.execute(sql, row)
    conn.commit()


if __name__ == '__main__':
    # Launch Chrome with the automation banner hidden and certificate
    # errors ignored.
    option = ChromeOptions()
    option.add_experimental_option('excludeSwitches', ['enable-automation'])
    option.add_argument('--ignore-certificate-errors')
    driver = webdriver.Chrome(executable_path='chromedriver', options=option)
    channel = '京东'
    # classify keyword -> search phrase typed into the JD search box.
    dic = {'欧若拉': '欧若拉玫瑰', '碎冰蓝': '碎冰蓝玫瑰'}
    for classify, search in dic.items():
        url = 'https://www.jd.com/'
        driver.get(url)
        # Type the keyword into the search box.
        driver.find_element_by_xpath('//*[@id="key"]').send_keys(search)
        # Click the search button.
        driver.find_element_by_xpath('//*[@id="search"]/div/div[2]/button').click()
        time.sleep(2)
        # Click the sort tab.
        driver.find_element_by_xpath('//*[@id="J_filter"]/div[1]/div[1]/a[2]/span').click()
        time.sleep(2)
        # Collect links from the result list whose anchor text mentions the
        # classify keyword.  (The previous version queried this XPath twice
        # and discarded the first result.)
        url_list = [a.get_attribute('href')
                    for a in driver.find_elements_by_xpath('//*[@id="J_goodsList"]/ul/li/div/div[3]/a')
                    if classify in a.text]

        # Visit each of the first 10 matching product pages and scrape them.
        for link in url_list[:10]:
            driver.get(link)
            # Specification (SKU) options, if the product page has any.
            specifications = driver.find_elements_by_xpath('//*[@id="choose-attr-1"]/div[2]/div')
            for j in range(len(specifications)):
                # Re-query the option list on every access: clicking an
                # option mutates the DOM and would leave earlier element
                # handles stale.
                if classify in driver.find_elements_by_xpath('//*[@id="choose-attr-1"]/div[2]/div')[j].text:
                    # Click the matching option in case it is not selected
                    # by default; best-effort — the click may fail if the
                    # option is already active or not clickable.
                    try:
                        driver.find_elements_by_xpath('//*[@id="choose-attr-1"]/div[2]/div')[j].click()
                    except Exception as e:
                        print(e)
                    title = driver.find_elements_by_xpath('//*[@id="choose-attr-1"]/div[2]/div')[j].text
                    save_data(driver, title, channel, search, classify)

            # No specification list at all: scrape the current page's title.
            if len(specifications) == 0:
                title = driver.find_element_by_xpath('/html/body/div[6]/div/div[2]/div[1]').text
                save_data(driver, title, channel, search, classify)
